Skip to content
Merged
3 changes: 2 additions & 1 deletion apps/labrinth/.env.docker-compose
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,8 @@ DATABASE_URL=postgresql://labrinth:labrinth@labrinth-postgres/labrinth
DATABASE_MIN_CONNECTIONS=0
DATABASE_MAX_CONNECTIONS=16

MEILISEARCH_ADDR=http://labrinth-meilisearch:7700
MEILISEARCH_READ_ADDR=http://localhost:7700
MEILISEARCH_WRITE_ADDRS=http://localhost:7700
MEILISEARCH_KEY=modrinth

REDIS_URL=redis://labrinth-redis
Expand Down
8 changes: 7 additions & 1 deletion apps/labrinth/.env.local
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,13 @@ DATABASE_URL=postgresql://labrinth:labrinth@localhost/labrinth
DATABASE_MIN_CONNECTIONS=0
DATABASE_MAX_CONNECTIONS=16

MEILISEARCH_ADDR=http://localhost:7700
MEILISEARCH_READ_ADDR=http://localhost:7700
MEILISEARCH_WRITE_ADDRS=http://localhost:7700

# For a sharded Meilisearch setup (sharded-meilisearch docker compose profile):
# MEILISEARCH_READ_ADDR=http://localhost:7710
# MEILISEARCH_WRITE_ADDRS=http://localhost:7700,http://localhost:7701

MEILISEARCH_KEY=modrinth

REDIS_URL=redis://localhost
Expand Down
14 changes: 14 additions & 0 deletions apps/labrinth/nginx/meili-lb.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# Round-robin load balancer fronting the sharded Meilisearch write nodes.
# Requests to this nginx instance are distributed across both shards.
upstream meilisearch_upstream {
    server meilisearch0:7700;
    server meilisearch1:7700;
}

server {
    # Plain HTTP listener; TLS is expected to be terminated elsewhere
    # (this runs inside the docker compose network).
    listen 80;

    location / {
        # Proxy everything to the upstream pool defined above.
        proxy_pass http://meilisearch_upstream;
        # Preserve the original Host header for the backend.
        proxy_set_header Host $host;
        # Append the client IP so backends see the real origin address.
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    }
}
11 changes: 10 additions & 1 deletion apps/labrinth/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ use crate::background_task::update_versions;
use crate::database::ReadOnlyPgPool;
use crate::queue::billing::{index_billing, index_subscriptions};
use crate::queue::moderation::AutomatedModerationQueue;
use crate::search::MeilisearchReadClient;
use crate::util::anrok;
use crate::util::archon::ArchonClient;
use crate::util::env::{parse_strings_from_var, parse_var};
Expand Down Expand Up @@ -68,6 +69,7 @@ pub struct LabrinthConfig {
pub email_queue: web::Data<EmailQueue>,
pub archon_client: web::Data<ArchonClient>,
pub gotenberg_client: GotenbergClient,
pub search_read_client: web::Data<MeilisearchReadClient>,
}

#[allow(clippy::too_many_arguments)]
Expand Down Expand Up @@ -274,6 +276,11 @@ pub fn app_setup(
file_host,
scheduler: Arc::new(scheduler),
ip_salt,
search_read_client: web::Data::new(
search_config.make_loadbalanced_read_client().expect(
"Failed to make Meilisearch client for read operations",
),
),
search_config,
session_queue,
payouts_queue: web::Data::new(PayoutsQueue::new()),
Expand Down Expand Up @@ -325,6 +332,7 @@ pub fn app_config(
.app_data(labrinth_config.archon_client.clone())
.app_data(web::Data::new(labrinth_config.stripe_client.clone()))
.app_data(web::Data::new(labrinth_config.anrok_client.clone()))
.app_data(labrinth_config.search_read_client.clone())
.app_data(labrinth_config.rate_limiter.clone())
.configure({
#[cfg(target_os = "linux")]
Expand Down Expand Up @@ -373,7 +381,8 @@ pub fn check_env_vars() -> bool {
failed |= check_var::<String>("LABRINTH_EXTERNAL_NOTIFICATION_KEY");
failed |= check_var::<String>("RATE_LIMIT_IGNORE_KEY");
failed |= check_var::<String>("DATABASE_URL");
failed |= check_var::<String>("MEILISEARCH_ADDR");
failed |= check_var::<String>("MEILISEARCH_READ_ADDR");
failed |= check_var::<String>("MEILISEARCH_WRITE_ADDRS");
failed |= check_var::<String>("MEILISEARCH_KEY");
failed |= check_var::<String>("REDIS_URL");
failed |= check_var::<String>("BIND_ADDR");
Expand Down
7 changes: 5 additions & 2 deletions apps/labrinth/src/routes/v2/projects.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,9 @@ use crate::queue::moderation::AutomatedModerationQueue;
use crate::queue::session::AuthQueue;
use crate::routes::v3::projects::ProjectIds;
use crate::routes::{ApiError, v2_reroute, v3};
use crate::search::{SearchConfig, SearchError, search_for_project};
use crate::search::{
MeilisearchReadClient, SearchConfig, SearchError, search_for_project,
};
use actix_web::{HttpRequest, HttpResponse, delete, get, patch, post, web};
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
Expand Down Expand Up @@ -54,6 +56,7 @@ pub fn config(cfg: &mut web::ServiceConfig) {
pub async fn project_search(
web::Query(info): web::Query<SearchRequest>,
config: web::Data<SearchConfig>,
read_client: web::Data<MeilisearchReadClient>,
) -> Result<HttpResponse, SearchError> {
// Search now uses loader_fields instead of explicit 'client_side' and 'server_side' fields
// While the backend for this has changed, it doesnt affect much
Expand Down Expand Up @@ -99,7 +102,7 @@ pub async fn project_search(
..info
};

let results = search_for_project(&info, &config).await?;
let results = search_for_project(&info, &config, &read_client).await?;

let results = LegacySearchResults::from(results);

Expand Down
7 changes: 5 additions & 2 deletions apps/labrinth/src/routes/v3/projects.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,9 @@ use crate::queue::moderation::AutomatedModerationQueue;
use crate::queue::session::AuthQueue;
use crate::routes::ApiError;
use crate::search::indexing::remove_documents;
use crate::search::{SearchConfig, SearchError, search_for_project};
use crate::search::{
MeilisearchReadClient, SearchConfig, SearchError, search_for_project,
};
use crate::util::img;
use crate::util::img::{delete_old_images, upload_image_optimized};
use crate::util::routes::read_limited_from_payload;
Expand Down Expand Up @@ -1018,8 +1020,9 @@ pub async fn edit_project_categories(
pub async fn project_search(
web::Query(info): web::Query<SearchRequest>,
config: web::Data<SearchConfig>,
read_client: web::Data<MeilisearchReadClient>,
) -> Result<HttpResponse, SearchError> {
let results = search_for_project(&info, &config).await?;
let results = search_for_project(&info, &config, &read_client).await?;

// TODO: add this back
// let results = ReturnSearchResults {
Expand Down
Loading