diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml new file mode 100644 index 00000000..46fb335f --- /dev/null +++ b/.github/workflows/checks.yml @@ -0,0 +1,27 @@ +name: Checks + +on: + push: + pull_request: + branches: + - develop + +jobs: + checks: + + runs-on: ubuntu-latest + container: + image: docker/tilt:latest + + steps: + - uses: actions/checkout@v4 + + - uses: azure/setup-helm@v4.3.1 + + - name: Create k8s Kind Cluster + run: ctlptl create cluster kind --registry=ctlptl-registry + + - name: Test Using Local Config + run: tilt ci --timeout "5m" + + diff --git a/README.MD b/README.MD index 160d8014..fbe070e4 100644 --- a/README.MD +++ b/README.MD @@ -69,15 +69,21 @@ This repository aims to provide a ready-to-go [Tilt](https://docs.tilt.dev/) set ## Prerequisites * [Tilt](https://docs.tilt.dev/install.html) * Kubernetes ([Docker Desktop](https://docs.docker.com/desktop/kubernetes/) or [Minikube](https://minikube.sigs.k8s.io/docs/) are recommended for those new to kubernetes) [Tilt cluster setup](https://docs.tilt.dev/choosing_clusters) -* For Windows users a bash program. By default git bash is used with an assumed installation directory of C:/Program Files/Git * [helm](https://helm.sh/docs/intro/install/) must be installed to generate the k8s from the helm charts in the gitops repo +* For Windows users: + * A bash program. By default git bash is used with an assumed installation directory of C:/Program Files/Git ## Startup Services In the root directory of the repository run `tilt up`. This will start all the faf services in the correct order. The status of each service can be viewed in the tilt UI by visiting . This is the control server for tilt where you can restart services or disable them for substitution by services you would like to run from source code as you actively develop them. ## Development -To develop against the FAF infrastructure you should disable the service in tilt that you are actively developing. Once disabled you can start up your developed version. Some tweaks may need to be made to the default configuration parameters in the source code. The proper values can be found in the configMaps in each of the services kubernetes deploy yaml files. +To develop against the FAF infrastructure you can run the service you are actively developing locally by adding it to the local-services list in your tilt_config.json file or passing --local-services when you run tilt up (see the example below). Tilt will then proxy connections to that service inside the cluster to your locally running instance. Some tweaks may need to be made to the default configuration parameters in the source code. The proper values can be found in the configMaps in each service's kubernetes deploy yaml files. You may need to specify your host-ip in the config or arguments in order for the proxied connections to work. ## Test Data -The default test data that is loaded can be found in /sql/test-data.sql. This can be overridden by providing a new path with the tilt configuration key test-data-path when running tilt up or in the tilt_config.json file in the repository root directory. +The default test data that is loaded can be found in [faf-db](https://github.com/FAForever/db/blob/develop/test-data.sql). +You can load the data at any time by manually triggering the populate-db resource under the Database label. This needs to be done once initially for any test data to exist.
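As an illustration of the configuration described above, a minimal `tilt_config.json` in the repository root might look like the following. The service name and host IP here are placeholders, not part of the repository; use the services and address that match your own setup:

```json
{
  "local-services": ["faf-api"],
  "host-ip": "192.168.1.50"
}
```

The same settings can also be passed on the command line, e.g. `tilt up -- --local-services faf-api --host-ip 192.168.1.50`, since arguments after `--` are forwarded to the Tiltfile configuration.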
+ +## Endpoints +Locally traefik uses the same ingress routes as we do in production just with a default base domain of faforever.localhost. So accessible domains will be api.faforever.localhost, hydra.faforever.localhost etc. By default these should resolve to 127.0.0.1. If they do not you can manually modify your hosts file to add the local dns records. For convenience the resource create-hosts-file-content +will output the necessary lines for the hosts file when run after every service has started. diff --git a/Tiltfile b/Tiltfile index a4ab8816..ce75d7e9 100644 --- a/Tiltfile +++ b/Tiltfile @@ -1,29 +1,44 @@ # This file is used for local development with tilt from https://tilt.dev/ in order to stand up the faf k8s stack from scratch config.define_string("windows-bash-path", args=False, usage="Path to bash.exe for windows") -config.define_string("default_pull_policy", args=False, usage="Pull policy to use for containers") +config.define_string("default-pull-policy", args=False, usage="Pull policy to use for containers") +config.define_string("host-ip", args=False, usage="IP Address of the host to enable redirection to local services") +config.define_string("hostname", args=False, usage="Accessible name of the host to enable redirection to local services") +config.define_string("faf-data-dir", args=False, usage="Directory where the FAF Data lives normally C:/ProgramData/FAForever on Windows or ~/.faforever on Linux") +config.define_string("base-domain", args=False, usage="Base Domain to use for all faf services. Defaults to faforever.localhost") +config.define_string_list("local-services", args=False, usage="Names of services that you intend to run locally") cfg = config.parse() windows_bash_path = cfg.get("windows-bash-path", "C:\\Program Files\\Git\\bin\\bash.exe") +host_ip = cfg.get("host-ip", "") +local_services = cfg.get("local-services", []) +base_domain = cfg.get("base-domain", "faforever.localhost") data_relative_path = ".local-data" if os.name == "nt": + faf_data_dir = cfg.get("faf-data-dir", "C:/ProgramData/FAForever") if not os.path.exists(windows_bash_path): fail("Windows users need to supply a valid path to a bash executable") if k8s_context() == "docker-desktop": + hostname = cfg.get("hostname", "host.docker.internal") drive, path_without_drive = os.getcwd().split(":") data_absolute_path = os.path.join("//run/desktop/mnt/host/", drive, path_without_drive, data_relative_path).replace("\\","/").lower() use_named_volumes = ["mariadb"] elif k8s_context() == "minikube": + hostname = cfg.get("hostname", "") data_absolute_path = "//data/" + data_relative_path use_named_volumes = [] else: fail("Cannot determine how to mount for windows host") else: + faf_data_dir = cfg.get("faf-data-dir", "~/.faforever") + hostname = cfg.get("hostname", "") data_absolute_path = os.path.join(os.getcwd(), data_relative_path) use_named_volumes = [] +host_details = ["hostIP="+host_ip, "hostname="+hostname] + def as_windows_command(command): if type(command) == "list": return [windows_bash_path, "-c"] + [" ".join(command)] @@ -73,19 +88,20 @@ def cronjob_to_job(yaml): return encode_yaml_stream(objects) -def helm_with_build_cache(chart, namespace="", values=[], set=[]): +def helm_with_build_cache(chart, namespace="", values=[], set=[], specifier = ""): cache_dir = ".helm-cache" - chart_resource = chart.replace("/", "-") - chart_cache_path = os.path.join(cache_dir, chart) + chart_resource = chart.replace("/", "-") + "-" + specifier + chart_cache_path = os.path.join(cache_dir, chart, 
specifier).replace("\\", "/") cached_yaml = os.path.join(chart_cache_path, "yaml") value_flags = [fragment for value in values for fragment in ("--values", value)] set_flags = [fragment for set_value in set for fragment in ("--set", set_value)] - command = ["./tilt/scripts/helm-with-cache.sh", cache_dir, chart, "--include-crds"] + command = ["./tilt/scripts/helm-with-cache.sh", chart_cache_path, chart, "--include-crds"] if namespace: command.extend(["--namespace", namespace]) command.extend(value_flags) command.extend(set_flags) + command.extend(["--set", "baseDomain=" + base_domain]) deps = [chart] deps.extend(values) @@ -106,21 +122,34 @@ def helm_with_build_cache(chart, namespace="", values=[], set=[]): if "namespace" not in object["metadata"]: object["metadata"]["namespace"] = namespace + default_pull_policy = cfg.get("default-pull-policy", "IfNotPresent") + for object in objects: - spec = object.get("spec", {}).get("template", {}).get("spec", {}) - containers = spec.get("containers", []) + spec = object.get("spec", {}) + template_spec = spec.get("template", {}).get("spec", {}) + containers = template_spec.get("containers", []) for container in containers: - container["imagePullPolicy"] = cfg.get("default_pull_policy", "IfNotPresent") - init_containers = spec.get("initContainers", []) + container["imagePullPolicy"] = default_pull_policy + init_containers = template_spec.get("initContainers", []) for init_container in init_containers: - init_container["imagePullPolicy"] = cfg.get("default_pull_policy", "IfNotPresent") + init_container["imagePullPolicy"] = default_pull_policy job_template_spec = object.get("spec", {}).get("jobTemplate", {}).get("spec", {}).get("template", {}).get("spec", {}) job_template_containers = job_template_spec.get("containers", []) for container in job_template_containers: - container["imagePullPolicy"] = cfg.get("default_pull_policy", "IfNotPresent") + container["imagePullPolicy"] = default_pull_policy job_template_init_containers = job_template_spec.get("initContainers", []) for init_container in job_template_init_containers: - init_container["imagePullPolicy"] = cfg.get("default_pull_policy", "IfNotPresent") + init_container["imagePullPolicy"] = default_pull_policy + if "entryPoints" in spec: + entryPoints = spec["entryPoints"] + if "websecure" in entryPoints: + entryPoints.append("web") + if containers or job_template_containers: + metadata = object["metadata"] + if "annotations" not in metadata or not metadata["annotations"]: + metadata["annotations"] = {} + + metadata["annotations"]["reloader.stakater.com/auto"] = "true" return encode_yaml_stream(objects) @@ -154,12 +183,71 @@ def no_policy_server(yaml): return encode_yaml_stream(objects) +def proxy_local_service_if_set(service_name, service_chart, service_namespace, additional_values=[], config_patch={}, service_deps=[], service_labels=[], service_links=[]): + all_service_deps = [] + all_service_deps.extend(service_deps) + service_objects = [] + + service_yaml = helm_with_build_cache(service_chart, namespace=service_namespace, values=["config/local.yaml"] + additional_values) + service_config_yaml, service_yaml = filter_yaml(service_yaml, kind="ConfigMap|Secret") + service_config_yaml = patch_config(yaml=service_config_yaml, config_name=service_name, config=config_patch) + if (service_config_yaml): + if service_name == "faf-lobby-server": + service_config_yaml = no_policy_server(service_config_yaml) + k8s_yaml(service_config_yaml) + config_objects = [] + for object in 
decode_yaml_stream(service_config_yaml): + config_objects.append(object["metadata"]["name"] + ":" + object["kind"].lower()) + service_config_name = service_name+"-config" + k8s_resource(new_name=service_config_name, objects=config_objects, labels=service_labels) + all_service_deps.append(service_config_name) + + service_traefik_yaml, service_yaml = filter_yaml(service_yaml, api_version="traefik.io/v1alpha1") + if (service_traefik_yaml): + k8s_yaml(service_traefik_yaml) + + for object in decode_yaml_stream(service_traefik_yaml): + service_objects.append(object["metadata"]["name"] + ":" + object["kind"].lower()) + all_service_deps.append("traefik") + + if (service_name in local_services): + service_ingress_details = extract_ingress_details(service_traefik_yaml) + if not service_ingress_details: + service_ingress_details = ["name="+service_name] + service_proxy_yaml = helm_with_build_cache("tilt/helm/host-proxy", specifier=service_name, namespace=service_namespace, values=["config/local.yaml"], set=host_details+service_ingress_details) + k8s_yaml(service_proxy_yaml) + for object in decode_yaml_stream(service_proxy_yaml): + service_objects.append(object["metadata"]["name"] + ":" + object["kind"].lower()) + k8s_resource(new_name=service_name, objects=service_objects, resource_deps=all_service_deps, labels=service_labels, links=service_links, pod_readiness="ignore") + else: + if service_name == "faf-icebreaker": + service_yaml = remove_init_container(service_yaml) + + k8s_yaml(service_yaml) + k8s_resource(workload=service_name, objects=service_objects, resource_deps=all_service_deps, labels=service_labels, links=service_links) + + +def extract_ingress_details(yaml): + objects = decode_yaml_stream(yaml) + for object in objects: + if object["kind"] == "IngressRoute": + routes = object["spec"]["routes"] + service = routes[0]["services"][0] + return ["port=" + str(service["port"]), "name=" + service["name"]] + return [] + +agnostic_local_resource("create-hosts-file-content", cmd=["./tilt/scripts/print-hosts.sh"], labels=["core"], auto_init=False, trigger_mode=TRIGGER_MODE_MANUAL, allow_parallel=True) +agnostic_local_resource("populate-featured-mod-files", cmd=["./tilt/scripts/update-faf-featured-mod.sh", faf_data_dir], labels=["database"], resource_deps=["faf-db-migrations"], auto_init=False, trigger_mode=TRIGGER_MODE_MANUAL, allow_parallel=True) + k8s_yaml("cluster/namespaces.yaml") k8s_yaml(helm_with_build_cache("infra/clusterroles", namespace="faf-infra", values=["config/local.yaml"])) -k8s_resource(new_name="namespaces", objects=["faf-infra:namespace", "faf-apps:namespace", "faf-ops:namespace"], labels=["core"]) +k8s_resource(new_name="namespaces", objects=["faf-infra:namespace", "faf-apps:namespace", "faf-ops:namespace", "traefik:namespace"], labels=["core"]) k8s_resource(new_name="clusterroles", objects=["read-cm-secrets:clusterrole"], labels=["core"]) k8s_resource(new_name="init-apps", objects=["init-apps:serviceaccount:faf-infra", "init-apps:serviceaccount:faf-apps", "allow-init-apps-read-app-config-infra:rolebinding", "allow-init-apps-read-app-config-apps:rolebinding"], resource_deps=["clusterroles"], labels=["core"]) +k8s_yaml(helm_with_build_cache("disabled/reloader", namespace="faf-ops", values=["config/local.yaml"])) +k8s_resource(workload="release-name-reloader", new_name="reloader", objects=["release-name-reloader:serviceaccount", "release-name-reloader-metadata-role:role", "release-name-reloader-role:clusterrole", "release-name-reloader-metadata-role-binding:rolebinding", 
"release-name-reloader-role-binding:clusterrolebinding"], resource_deps=["namespaces"], labels=["core"]) + storage_yaml = helm_with_build_cache("cluster/storage", values=["config/local.yaml"], set=["dataPath="+data_absolute_path]) storage_yaml = to_hostpath_storage(storage_yaml, use_named_volumes=use_named_volumes) k8s_yaml(storage_yaml) @@ -172,7 +260,7 @@ for object in decode_yaml_stream(storage_yaml): k8s_resource(new_name="volumes", objects=volume_identifiers, labels=["core"], trigger_mode=TRIGGER_MODE_MANUAL) -traefik_yaml = helm_with_build_cache("cluster/traefik", values=["config/local.yaml"], namespace="traefik") +traefik_yaml = helm_with_build_cache("cluster/traefik", values=["config/local.yaml", "cluster/traefik/values-local.yaml"], set=["traefik.ingressRoute.dashboard.matchRule='Host(`traefik.{}`)'".format(base_domain)], namespace="traefik") k8s_yaml(traefik_yaml) traefik_identifiers = [] @@ -182,8 +270,8 @@ for object in decode_yaml_stream(traefik_yaml): if kind != "deployment" and kind != "service": traefik_identifiers.append(name + ":" + kind) -k8s_resource(new_name="traefik-setup", objects=traefik_identifiers, labels=["traefik"]) -k8s_resource(workload="release-name-traefik", new_name="traefik", port_forwards=["443:8443"], resource_deps=["traefik-setup"], labels=["traefik"]) +k8s_resource(new_name="traefik-setup", objects=traefik_identifiers, resource_deps=["namespaces"], labels=["traefik"]) +k8s_resource(workload="release-name-traefik", new_name="traefik", port_forwards=["443:8443", "80:8000"], resource_deps=["traefik-setup"], labels=["traefik"]) postgres_yaml = helm_with_build_cache("infra/postgres", namespace="faf-infra", values=["config/local.yaml"]) postgres_init_user_yaml, postgres_resource_yaml = filter_yaml(postgres_yaml, {"app": "postgres-sync-db-user"}) @@ -222,7 +310,7 @@ rabbitmq_yaml = helm_with_build_cache("apps/rabbitmq", namespace="faf-apps", val rabbitmq_init_user_yaml, rabbitmq_resource_yaml = filter_yaml(rabbitmq_yaml, {"app": "rabbitmq-sync-user"}) k8s_yaml(rabbitmq_init_user_yaml) k8s_yaml(rabbitmq_resource_yaml) -k8s_resource(workload="rabbitmq", objects=["rabbitmq:configmap", "rabbitmq:secret"], port_forwards=["15672"], resource_deps=["volumes"], labels=["rabbitmq"]) +k8s_resource(workload="rabbitmq", objects=["rabbitmq:configmap", "rabbitmq:secret"], port_forwards=["15672", "5672"], resource_deps=["volumes"], labels=["rabbitmq"]) rabbitmq_setup_resources = [] for object in decode_yaml_stream(rabbitmq_init_user_yaml): rabbitmq_setup_resources.append(object["metadata"]["name"]) @@ -234,88 +322,87 @@ k8s_resource(workload="faf-db-migrations", objects=["faf-db-migrations:secret"], k8s_yaml("tilt/yaml/populate-db.yaml") k8s_resource(workload="populate-db", resource_deps=["faf-db-migrations"], labels=["database"], auto_init=False, trigger_mode=TRIGGER_MODE_MANUAL) -k8s_yaml(keep_objects_of_kind(helm_with_build_cache("apps/faf-voting", namespace="faf-apps", values=["config/local.yaml"]), kinds=["ConfigMap", "Secret"])) -k8s_resource(new_name="faf-voting-config", objects=["faf-voting:configmap", "faf-voting:secret"], labels=["voting"]) - -k8s_yaml(helm_with_build_cache("apps/faf-website", namespace="faf-apps", values=["config/local.yaml", "apps/faf-website/values-prod.yaml"])) -k8s_resource(new_name="faf-website-config", objects=["faf-website:configmap", "faf-website:secret"], labels=["website"]) -k8s_resource(workload="faf-website", objects=["faf-website:ingressroute"], resource_deps=["traefik"], labels=["website"], links=[link("https://www.localhost", 
"FAForever Website")]) - -# k8s_yaml(helm_with_build_cache("apps/faf-content", namespace="faf-apps", values=["config/local.yaml"])) -# k8s_resource(new_name="faf-content-config", objects=["faf-content:configmap"], labels=["content"]) -# k8s_resource(workload="faf-content", objects=["faf-content:ingressroute", "cors:middleware", "redirect-replay-subdomain:middleware"], resource_deps=["traefik"], labels=["content"], links=[link("https://content.localhost", "FAForever Content")]) - -k8s_yaml(helm_with_build_cache("apps/ergochat", namespace="faf-apps", values=["config/local.yaml"], set=["baseDomain=chat.localhost"])) +k8s_yaml(helm_with_build_cache("apps/ergochat", namespace="faf-apps", values=["config/local.yaml"])) k8s_resource(new_name="ergochat-config", objects=["ergochat:configmap", "ergochat:secret"], labels=["chat"]) k8s_resource(workload="ergochat", objects=["ergochat-webirc:ingressroute"], resource_deps=["traefik"] + mariadb_setup_resources, port_forwards=["8097:8097"], labels=["chat"]) -api_yaml = helm_with_build_cache("apps/faf-api", namespace="faf-apps", values=["config/local.yaml", "apps/faf-api/values-test.yaml"]) -api_yaml = patch_config(api_yaml, "faf-api", {"JWT_FAF_HYDRA_ISSUER": "http://ory-hydra:4444"}) -k8s_yaml(api_yaml) -k8s_resource(new_name="faf-api-config", objects=["faf-api:configmap", "faf-api:secret", "faf-api-mail:configmap"], labels=["api"]) -k8s_resource(workload="faf-api", objects=["faf-api:ingressroute"], port_forwards=["8010"], resource_deps=["faf-api-config", "faf-db-migrations", "traefik", "ory-hydra"] + rabbitmq_setup_resources, labels=["api"], links=[link("https://api.localhost", "FAF API")]) - -k8s_yaml(helm_with_build_cache("apps/faf-league-service", namespace="faf-apps", values=["config/local.yaml"])) -k8s_resource(new_name="faf-league-service-config", objects=["faf-league-service:configmap", "faf-league-service:secret"], labels=["leagues"]) -k8s_resource(workload="faf-league-service", resource_deps=["faf-league-service-config"] + mariadb_setup_resources + rabbitmq_setup_resources, labels=["leagues"]) - -lobby_server_yaml = helm_with_build_cache("apps/faf-lobby-server", namespace="faf-apps", values=["config/local.yaml"]) -lobby_server_yaml = patch_config(lobby_server_yaml, "faf-lobby-server", {"HYDRA_JWKS_URI": "http://ory-hydra:4444/.well-known/jwks.json"}) -lobby_server_yaml = no_policy_server(lobby_server_yaml) -k8s_yaml(lobby_server_yaml) -k8s_resource(new_name="faf-lobby-server-config", objects=["faf-lobby-server:configmap", "faf-lobby-server:secret"], labels=["lobby"]) -k8s_resource(workload="faf-lobby-server", resource_deps=["faf-lobby-server-config", "faf-db-migrations", "ory-hydra"], labels=["lobby"]) - -k8s_yaml(keep_objects_of_kind(helm_with_build_cache("apps/faf-policy-server", namespace="faf-apps"), kinds=["ConfigMap", "Secret"])) -k8s_resource(new_name="faf-policy-server-config", objects=["faf-policy-server:configmap", "faf-policy-server:secret"], labels=["lobby"]) - -k8s_yaml(helm_with_build_cache("apps/faf-replay-server", namespace="faf-apps", values=["config/local.yaml"])) -k8s_resource(new_name="faf-replay-server-config", objects=["faf-replay-server:configmap", "faf-replay-server:secret"], labels=["replay"]) -k8s_resource(workload="faf-replay-server", objects=["faf-replay-server:ingressroute", "faf-replay-server:ingressroutetcp"], port_forwards=["15001:15001"], resource_deps=["faf-replay-server-config", "faf-db-migrations", "traefik"], labels=["replay"]) - -user_service_yaml = helm_with_build_cache("apps/faf-user-service", 
namespace="faf-apps", values=["config/local.yaml"]) -user_service_yaml = patch_config(user_service_yaml, "faf-user-service", {"HYDRA_TOKEN_ISSUER": "http://ory-hydra:4444", "HYDRA_JWKS_URL": "http://ory-hydra:4444/.well-known/jwks.json", "LOBBY_URL":"ws://localhost:8003", "REPLAY_URL":"ws://localhost:15001"}) -k8s_yaml(user_service_yaml) -k8s_resource(new_name="faf-user-service-config", objects=["faf-user-service:configmap", "faf-user-service:secret", "faf-user-service-mail-templates:configmap"], labels=["user"]) -k8s_resource(workload="faf-user-service", objects=["faf-user-service:ingressroute"], resource_deps=["faf-db-migrations", "traefik", "ory-hydra"], port_forwards=["8080"], labels=["user"], links=[link("https://user.localhost/register", "User Service Registration")]) - k8s_yaml(helm_with_build_cache("apps/wordpress", namespace="faf-apps", values=["config/local.yaml"])) k8s_resource(new_name="wordpress-config", objects=["wordpress:configmap", "wordpress:secret"], labels=["website"]) -k8s_resource(workload="wordpress", objects=["wordpress:ingressroute"], resource_deps=["traefik"], labels=["website"], links=[link("https://direct.localhost", "FAF Wordpress")]) +k8s_resource(workload="wordpress", objects=["wordpress:ingressroute"], resource_deps=["traefik"], labels=["website"], links=[link("http://direct.{}".format(base_domain), "FAF Wordpress")]) k8s_yaml(helm_with_build_cache("apps/wikijs", namespace="faf-apps", values=["config/local.yaml"])) k8s_resource(new_name="wikijs-config", objects=["wikijs:configmap", "wikijs:secret", "wikijs-sso:configmap"], labels=["wiki"]) -k8s_resource(workload="wikijs", objects=["wikijs:ingressroute"], resource_deps=["traefik"], labels=["wiki"], links=[link("https://wiki.localhost", "FAF Wiki")]) +k8s_resource(workload="wikijs", objects=["wikijs:ingressroute"], resource_deps=["traefik"] + postgres_setup_resources, labels=["wiki"], links=[link("http://wiki.{}".format(base_domain), "FAF Wiki")]) k8s_yaml(helm_with_build_cache("apps/nodebb", namespace="faf-apps", values=["config/local.yaml"])) k8s_resource(new_name="nodebb-config", objects=["nodebb:configmap", "nodebb:secret"], labels=["forum"]) -k8s_resource(workload="nodebb", objects=["nodebb:ingressroute"], port_forwards=["4567:4567"], resource_deps=["traefik"] + mongodb_setup_resources, labels=["forum"], links=[link("https://forum.localhost", "FAF Forum")]) - -k8s_yaml(helm_with_build_cache("apps/faf-unitdb", namespace="faf-apps", values=["config/local.yaml"])) -k8s_resource(new_name="faf-unitdb-config", objects=["faf-unitdb:configmap", "faf-unitdb:secret"], labels=["unitdb"]) -k8s_resource(workload="faf-unitdb", objects=["faf-unitdb:ingressroute"], resource_deps=["traefik"], labels=["unitdb"], links=[link("https://unitdb.localhost", "Rackover UnitDB")]) +k8s_resource(workload="nodebb", objects=["nodebb:ingressroute"], resource_deps=["traefik"] + mongodb_setup_resources, labels=["forum"], links=[link("http://forum.{}".format(base_domain), "FAF Forum")]) k8s_yaml(keep_objects_of_kind(helm_with_build_cache("apps/debezium", namespace="faf-apps", values=["config/local.yaml"]), kinds=["ConfigMap", "Secret"])) k8s_resource(new_name="debezium-config", objects=["debezium:configmap", "debezium:secret"], labels=["database"]) -k8s_yaml(helm_with_build_cache("apps/faf-ws-bridge", namespace="faf-apps", values=["config/local.yaml"])) -k8s_resource(workload="faf-ws-bridge", objects=["faf-ws-bridge:ingressroute"], port_forwards=["8003"], resource_deps=["faf-lobby-server", "traefik"], labels=["lobby"]) - -icebreaker_yaml 
= helm_with_build_cache("apps/faf-icebreaker", namespace="faf-apps", values=["config/local.yaml"]) -icebreaker_yaml = remove_init_container(icebreaker_yaml) -icebreaker_yaml = patch_config(icebreaker_yaml, "faf-icebreaker", {"HYDRA_URL": "http://ory-hydra:4444"}) -k8s_yaml(icebreaker_yaml) -k8s_resource(new_name="faf-icebreaker-config", objects=["faf-icebreaker:configmap", "faf-icebreaker:secret"], labels=["api"]) -k8s_resource(workload="faf-icebreaker", objects=["faf-icebreaker:ingressroute", "faf-icebreaker-stripprefix:middleware"], resource_deps=["faf-db-migrations", "traefik", "ory-hydra"] + rabbitmq_setup_resources, labels=["api"]) - hydra_yaml = helm_with_build_cache("apps/ory-hydra", namespace="faf-apps", values=["config/local.yaml"]) hydra_client_create_yaml, hydra_resources_yaml = filter_yaml(hydra_yaml, {"app": "ory-hydra-create-clients"}) _, hydra_resources_yaml = filter_yaml(hydra_resources_yaml, {"app": "ory-hydra-janitor"}) -hydra_resources_yaml = patch_config(hydra_resources_yaml, "ory-hydra", {"URLS_SELF_ISSUER": "http://ory-hydra:4444", "URLS_SELF_PUBLIC": "http://localhost:4444", "URLS_LOGIN": "http://localhost:8080/oauth2/login", "URLS_CONSENT": "http://localhost:8080/oauth2/consent", "DEV": "true"}) +hydra_resources_yaml = patch_config(hydra_resources_yaml, "ory-hydra", {"URLS_SELF_ISSUER": "http://ory-hydra:4444", "URLS_SELF_PUBLIC": "http://hydra.{}".format(base_domain), "URLS_LOGIN": "http://user.{}/oauth2/login".format(base_domain), "URLS_CONSENT": "http://user.{}/oauth2/consent".format(base_domain), "DEV": "true"}) k8s_yaml(hydra_resources_yaml) k8s_yaml(hydra_client_create_yaml) k8s_resource(new_name="ory-hydra-config", objects=["ory-hydra:configmap", "ory-hydra:secret"], labels=["hydra"]) k8s_resource(workload="ory-hydra-migration", resource_deps=["ory-hydra-config"] + postgres_setup_resources, labels=["hydra"]) k8s_resource(workload="ory-hydra", objects=["ory-hydra:ingressroute"], resource_deps=["ory-hydra-migration", "traefik"], port_forwards=["4444", "4445"], labels=["hydra"]) for object in decode_yaml_stream(hydra_client_create_yaml): - k8s_resource(workload=object["metadata"]["name"], resource_deps=["ory-hydra"], labels=["hydra"]) \ No newline at end of file + k8s_resource(workload=object["metadata"]["name"], resource_deps=["ory-hydra"], labels=["hydra"]) + +k8s_yaml(keep_objects_of_kind(helm_with_build_cache("apps/faf-voting", namespace="faf-apps", values=["config/local.yaml"]), kinds=["ConfigMap", "Secret"])) +k8s_resource(new_name="faf-voting-config", objects=["faf-voting:configmap", "faf-voting:secret"], labels=["voting"]) + +k8s_yaml(keep_objects_of_kind(helm_with_build_cache("apps/faf-policy-server", namespace="faf-apps"), kinds=["ConfigMap", "Secret"])) +k8s_resource(new_name="faf-policy-server-config", objects=["faf-policy-server:configmap", "faf-policy-server:secret"], labels=["lobby"]) + +# k8s_yaml(helm_with_build_cache("apps/faf-content", namespace="faf-apps", values=["config/local.yaml"])) +# k8s_resource(new_name="faf-content-config", objects=["faf-content:configmap"], labels=["content"]) +# k8s_resource(workload="faf-content", objects=["faf-content:ingressroute", "cors:middleware", "redirect-replay-subdomain:middleware"], resource_deps=["traefik"], labels=["content"], links=[link("http://content.{}".format(base_domain), "FAForever Content")]) + +user_service_deps = ["faf-db-migrations", "ory-hydra"] +user_service_labels = ["user"] +user_service_links = [link("http://user.{}/register".format(base_domain), "User Service Registration")] 
+user_service_patch = {"HYDRA_TOKEN_ISSUER": "http://ory-hydra:4444", "HYDRA_JWKS_URL": "http://ory-hydra:4444/.well-known/jwks.json", "LOBBY_URL":"ws://ws.{}".format(base_domain), "REPLAY_URL":"ws://replay-ws.{}".format(base_domain)} +proxy_local_service_if_set(service_name="faf-user-service", service_chart="apps/faf-user-service", service_namespace="faf-apps", service_deps=user_service_deps, service_labels=user_service_labels, service_links=user_service_links, config_patch=user_service_patch) + +website_deps = ["wordpress"] +website_labels = ["website"] +website_links = [link("http://www.{}".format(base_domain), "FAForever Website")] +website_patch = {"OAUTH_URL": "http://ory-hydra:4444", "OAUTH_PUBLIC_URL": "http://hydra.{}".format(base_domain), "API_URL": "http://faf-api:8010", "WP_URL": "http://wordpress:80"} +proxy_local_service_if_set(service_name="faf-website", service_chart="apps/faf-website", service_namespace="faf-apps", service_deps=website_deps, service_labels=website_labels, service_links=website_links, additional_values=["apps/faf-website/values-prod.yaml"], config_patch=website_patch) + +api_deps = ["faf-db-migrations", "ory-hydra"] + rabbitmq_setup_resources +api_labels = ["api"] +api_links = [link("http://api.{}".format(base_domain), "FAF API")] +api_patch = {"JWT_FAF_HYDRA_ISSUER": "http://ory-hydra:4444", "REPLAY_DOWNLOAD_URL_FORMAT": "http://replays.{}/%s".format(base_domain), "MOD_DOWNLOAD_URL_FORMAT": "http://content.{}/mods/%s".format(base_domain), "MAP_DOWNLOAD_URL_FORMAT": "http://content.{}/maps/%s".format(base_domain), "FEATURED_MOD_URL_FORMAT":"http://content.{}/legacy-featured-mod-files/%s/%s".format(base_domain), "AVATAR_DOWNLOAD_URL_FORMAT":"http://content.{}/faf/avatars/%s".format(base_domain)} +proxy_local_service_if_set(service_name="faf-api", service_chart="apps/faf-api", service_namespace="faf-apps", service_deps=api_deps, service_labels=api_labels, service_links=api_links, config_patch=api_patch) + +league_service_deps = mariadb_setup_resources + rabbitmq_setup_resources +league_service_labels = ["leagues"] +proxy_local_service_if_set(service_name="faf-league-service", service_chart="apps/faf-league-service", service_namespace="faf-apps", service_deps=league_service_deps, service_labels=league_service_labels) + +lobby_server_deps = ["faf-db-migrations", "ory-hydra"] + rabbitmq_setup_resources +lobby_server_labels = ["lobby"] +lobby_server_patch = {"HYDRA_JWKS_URI": "http://ory-hydra:4444/.well-known/jwks.json"} +proxy_local_service_if_set(service_name="faf-lobby-server", service_chart="apps/faf-lobby-server", service_namespace="faf-apps", service_deps=lobby_server_deps, service_labels=lobby_server_labels, config_patch=lobby_server_patch) + +replay_server_deps = ["faf-db-migrations"] +replay_server_labels = ["replay"] +proxy_local_service_if_set(service_name="faf-replay-server", service_chart="apps/faf-replay-server", service_namespace="faf-apps", service_deps=replay_server_deps, service_labels=replay_server_labels) + +unitdb_labels = ["unitdb"] +unitdb_links = [link("http://unitdb.{}".format(base_domain), "Rackover UnitDB")] +proxy_local_service_if_set(service_name="faf-unitdb", service_chart="apps/faf-unitdb", service_namespace="faf-apps", service_labels=unitdb_labels, service_links=unitdb_links) + +ws_bridge_deps = ["faf-lobby-server"] +ws_bridge_labels = ["lobby"] +proxy_local_service_if_set(service_name="faf-ws-bridge", service_chart="apps/faf-ws-bridge", service_namespace="faf-apps", service_deps=ws_bridge_deps, service_labels=ws_bridge_labels) + 
+icebreaker_deps = ["faf-db-migrations", "ory-hydra"] + rabbitmq_setup_resources +icebreaker_labels = ["api"] +icebreaker_patch = {"HYDRA_URL": "http://ory-hydra:4444", "XIRSYS_ENABLED": "false", "XIRSYS_TURN_ENABLED": "false", "CLOUDFLARE_ENABLED": "false"} +proxy_local_service_if_set(service_name="faf-icebreaker", service_chart="apps/faf-icebreaker", service_namespace="faf-apps", service_deps=icebreaker_deps, service_labels=icebreaker_labels, config_patch=icebreaker_patch) diff --git a/apps/faf-icebreaker/templates/config.yaml b/apps/faf-icebreaker/templates/config.yaml index 44f97b04..5890ab6e 100644 --- a/apps/faf-icebreaker/templates/config.yaml +++ b/apps/faf-icebreaker/templates/config.yaml @@ -9,14 +9,16 @@ data: HYDRA_URL: "https://hydra.{{.Values.baseDomain}}" SELF_URL: "https://ice.{{.Values.baseDomain}}" DB_USERNAME: "faf-icebreaker" + DB_NAME: "faf-icebreaker" DB_URL: "jdbc:mariadb://mariadb:3306/faf-icebreaker?ssl=false" RABBITMQ_HOST: "rabbitmq" RABBITMQ_USER: "faf-icebreaker" RABBITMQ_PORT: "5672" - CLOUDFLARE_ENABLED: "false" + CLOUDFLARE_ENABLED: "true" XIRSYS_ENABLED: "true" XIRSYS_TURN_ENABLED: "true" GEOIPUPDATE_EDITION_IDS: "GeoLite2-City" LOKI_BASE_URL: "http://monitoring-loki-gateway.faf-ops.svc" - FORCE_RELAY: "false" - QUARKUS_LOG_CATEGORY__COM_FAFOREVER__LEVEL: "DEBUG" \ No newline at end of file + FORCE_RELAY: "true" + REAL_IP_HEADER: "Cf-Connecting-Ip" + QUARKUS_LOG_CATEGORY__COM_FAFOREVER__LEVEL: "TRACE" \ No newline at end of file diff --git a/apps/faf-icebreaker/templates/deployment.yaml b/apps/faf-icebreaker/templates/deployment.yaml index f38ed8ef..6c0e0c1d 100644 --- a/apps/faf-icebreaker/templates/deployment.yaml +++ b/apps/faf-icebreaker/templates/deployment.yaml @@ -32,7 +32,7 @@ spec: - name: geolite-db mountPath: /usr/share/GeoIP containers: - - image: faforever/faf-icebreaker:1.1.9 + - image: faforever/faf-icebreaker:1.2.0-RC4 imagePullPolicy: Always name: faf-icebreaker envFrom: diff --git a/apps/faf-icebreaker/templates/local-secret.yaml b/apps/faf-icebreaker/templates/local-secret.yaml index 31d072e5..481deae6 100644 --- a/apps/faf-icebreaker/templates/local-secret.yaml +++ b/apps/faf-icebreaker/templates/local-secret.yaml @@ -13,6 +13,7 @@ stringData: RABBITMQ_PASSWORD: "banana" XIRSYS_IDENT: "banana" XIRSYS_SECRET: "banana" + HETZNER_API_KEY: "banana" JWT_PRIVATE_KEY_PATH: |- -----BEGIN PRIVATE KEY----- MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQDXsCsl9W0vnW2k diff --git a/apps/faf-legacy-deployment/scripts/CoopDeployer.kt b/apps/faf-legacy-deployment/scripts/CoopDeployer.kt index 4e3859cb..4d4747c1 100755 --- a/apps/faf-legacy-deployment/scripts/CoopDeployer.kt +++ b/apps/faf-legacy-deployment/scripts/CoopDeployer.kt @@ -1,7 +1,12 @@ -import org.apache.commons.compress.archivers.zip.Zip64Mode +@file:Suppress("PackageDirectoryMismatch") + +package com.faforever.coopdeployer + +import com.faforever.FafDatabase +import com.faforever.GitRepo +import com.faforever.Log import org.apache.commons.compress.archivers.zip.ZipArchiveEntry import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream -import org.eclipse.jgit.api.Git import org.slf4j.LoggerFactory import java.io.IOException import java.net.URI @@ -15,12 +20,7 @@ import java.nio.file.StandardCopyOption import java.nio.file.attribute.FileTime import java.nio.file.attribute.PosixFilePermission import java.security.MessageDigest -import java.sql.Connection -import java.sql.DriverManager import java.time.Duration -import java.util.zip.CRC32 -import java.util.zip.ZipEntry -import 
java.util.zip.ZipOutputStream import kotlin.io.path.inputStream private val log = LoggerFactory.getLogger("CoopDeployer") @@ -34,33 +34,6 @@ fun Path.setPerm664() { Files.setPosixFilePermissions(this, perms) } -data class FeatureModGitRepo( - val workDir: Path, - val repoUrl: String, - val gitRef: String, -) { - fun checkout(): Path { - if (Files.exists(workDir.resolve(".git"))) { - log.info("Repo exists — fetching and checking out $gitRef...") - Git.open(workDir.toFile()).use { git -> - git.fetch().call() - git.checkout().setName(gitRef).call() - } - } else { - log.info("Cloning repository $repoUrl") - Git.cloneRepository() - .setURI(repoUrl) - .setDirectory(workDir.toFile()) - .call() - log.info("Checking out $gitRef") - Git.open(workDir.toFile()).use { git -> - git.checkout().setName(gitRef).call() - } - } - - return workDir - } -} data class GithubReleaseAssetDownloader( val repoOwner: String = "FAForever", @@ -160,25 +133,14 @@ data class GithubReleaseAssetDownloader( } -data class FafDatabase( - val host: String, - val database: String, - val username: String, - val password: String, +data class CoopDatabase( val dryRun: Boolean -) : AutoCloseable { +) : FafDatabase() { /** * Definition of an existing file in the database */ data class PatchFile(val mod: String, val fileId: Int, val name: String, val md5: String, val version: Int) - private val connection: Connection = - DriverManager.getConnection( - "jdbc:mariadb://$host/$database?useSSL=false&serverTimezone=UTC", - username, - password - ) - fun getCurrentPatchFile(mod: String, fileId: Int): PatchFile? { val sql = """ SELECT uf.fileId, uf.name, uf.md5, t.v @@ -191,7 +153,7 @@ data class FafDatabase( WHERE uf.fileId = ? """.trimIndent() - connection.prepareStatement(sql).use { stmt -> + prepareStatement(sql).use { stmt -> stmt.setInt(1, fileId) val rs = stmt.executeQuery() while (rs.next()) { @@ -213,12 +175,12 @@ data class FafDatabase( } val del = "DELETE FROM updates_${mod}_files WHERE fileId=? AND version=?" 
val ins = "INSERT INTO updates_${mod}_files (fileId, version, name, md5, obselete) VALUES (?, ?, ?, ?, 0)" - connection.prepareStatement(del).use { + prepareStatement(del).use { it.setInt(1, fileId) it.setInt(2, version) it.executeUpdate() } - connection.prepareStatement(ins).use { + prepareStatement(ins).use { it.setInt(1, fileId) it.setInt(2, version) it.setString(3, name) @@ -226,10 +188,6 @@ data class FafDatabase( it.executeUpdate() } } - - override fun close() { - connection.close() - } } private const val MINIMUM_ZIP_DATE = 315532800000L // 1980-01-01 @@ -238,7 +196,7 @@ private val MINIMUM_ZIP_FILE_TIME = FileTime.fromMillis(MINIMUM_ZIP_DATE) class Patcher( val patchVersion: Int, val targetDir: Path, - val db: FafDatabase, + val db: CoopDatabase, val dryRun: Boolean, ) { /** @@ -409,22 +367,18 @@ class Patcher( } fun main() { + Log.init() + val PATCH_VERSION = System.getenv("PATCH_VERSION") ?: error("PATCH_VERSION required") val REPO_URL = System.getenv("GIT_REPO_URL") ?: "https://github.com/FAForever/fa-coop.git" val GIT_REF = System.getenv("GIT_REF") ?: "v$PATCH_VERSION" - val WORKDIR = System.getenv("GIT_WORKDIR") ?: "/tmp/fa-coop-kt" + val WORKDIR = System.getenv("GIT_WORKDIR") ?: "/tmp/fa-coop" val DRYRUN = (System.getenv("DRY_RUN") ?: "false").lowercase() in listOf("1", "true", "yes") - - val DB_HOST = System.getenv("DATABASE_HOST") ?: "localhost" - val DB_NAME = System.getenv("DATABASE_NAME") ?: "faf" - val DB_USER = System.getenv("DATABASE_USERNAME") ?: "root" - val DB_PASS = System.getenv("DATABASE_PASSWORD") ?: "banana" - val TARGET_DIR = Paths.get("./legacy-featured-mod-files") log.info("=== Kotlin Coop Deployer v{} ===", PATCH_VERSION) - val repo = FeatureModGitRepo( + val repo = GitRepo( workDir = Paths.get(WORKDIR), repoUrl = REPO_URL, gitRef = GIT_REF @@ -476,13 +430,7 @@ fun main() { Patcher.PatchFile(25, "FAF_Coop_Operation_Tight_Spot_VO.v%d.nx2", null), ) - FafDatabase( - host = DB_HOST, - database = DB_NAME, - username = DB_USER, - password = DB_PASS, - dryRun = DRYRUN - ).use { db -> + CoopDatabase(dryRun = DRYRUN).use { db -> val patcher = Patcher( patchVersion = PATCH_VERSION.toInt(), targetDir = TARGET_DIR, diff --git a/apps/faf-legacy-deployment/scripts/CoopMapDeployer.kt b/apps/faf-legacy-deployment/scripts/CoopMapDeployer.kt new file mode 100644 index 00000000..238fd48f --- /dev/null +++ b/apps/faf-legacy-deployment/scripts/CoopMapDeployer.kt @@ -0,0 +1,238 @@ +@file:Suppress("PackageDirectoryMismatch") + +package com.faforever.coopmapdeployer + +import com.faforever.FafDatabase +import com.faforever.GitRepo +import com.faforever.Log +import org.apache.commons.compress.archivers.zip.ZipArchiveEntry +import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream +import org.slf4j.LoggerFactory +import java.nio.file.Files +import java.nio.file.Path +import java.nio.file.Paths +import java.nio.file.attribute.FileTime +import java.security.MessageDigest +import kotlin.io.path.copyTo +import kotlin.io.path.createDirectories +import kotlin.io.path.exists +import kotlin.io.path.isDirectory +import kotlin.io.path.isRegularFile +import kotlin.io.path.readBytes +import kotlin.io.path.readText +import kotlin.io.path.walk + +private val log = LoggerFactory.getLogger("coop-maps-updater") + +private const val FIXED_TIMESTAMP = 1078100502L // 2004-03-01T00:21:42Z +private val FIXED_FILE_TIME = FileTime.fromMillis(FIXED_TIMESTAMP) + + +data class CoopMap( + val folderName: String, + val mapId: Int, + val mapType: Int +) { + fun zipName(version: Int) = + 
"${folderName.lowercase()}.v${version.toString().padStart(4, '0')}.zip" + + fun folderName(version: Int) = + "${folderName.lowercase()}.v${version.toString().padStart(4, '0')}" +} + +private val coopMaps = listOf( + CoopMap("X1CA_Coop_001", 1, 0), + CoopMap("X1CA_Coop_002", 3, 0), + CoopMap("X1CA_Coop_003", 4, 0), + CoopMap("X1CA_Coop_004", 5, 0), + CoopMap("X1CA_Coop_005", 6, 0), + CoopMap("X1CA_Coop_006", 7, 0), + + CoopMap("SCCA_Coop_A01", 8, 1), + CoopMap("SCCA_Coop_A02", 9, 1), + CoopMap("SCCA_Coop_A03", 10, 1), + CoopMap("SCCA_Coop_A04", 11, 1), + CoopMap("SCCA_Coop_A05", 12, 1), + CoopMap("SCCA_Coop_A06", 13, 1), + + CoopMap("SCCA_Coop_R01", 20, 2), + CoopMap("SCCA_Coop_R02", 21, 2), + CoopMap("SCCA_Coop_R03", 22, 2), + CoopMap("SCCA_Coop_R04", 23, 2), + CoopMap("SCCA_Coop_R05", 24, 2), + CoopMap("SCCA_Coop_R06", 25, 2), + + CoopMap("SCCA_Coop_E01", 14, 3), + CoopMap("SCCA_Coop_E02", 15, 3), + CoopMap("SCCA_Coop_E03", 16, 3), + CoopMap("SCCA_Coop_E04", 17, 3), + CoopMap("SCCA_Coop_E05", 18, 3), + CoopMap("SCCA_Coop_E06", 19, 3), + + CoopMap("FAF_Coop_Prothyon_16", 26, 4), + CoopMap("FAF_Coop_Fort_Clarke_Assault", 27, 4), + CoopMap("FAF_Coop_Theta_Civilian_Rescue", 28, 4), + CoopMap("FAF_Coop_Novax_Station_Assault", 31, 4), + CoopMap("FAF_Coop_Operation_Tha_Atha_Aez", 32, 4), + CoopMap("FAF_Coop_Havens_Invasion", 33, 4), + CoopMap("FAF_Coop_Operation_Rescue", 35, 4), + CoopMap("FAF_Coop_Operation_Uhthe_Thuum_QAI", 36, 4), + CoopMap("FAF_Coop_Operation_Yath_Aez", 37, 4), + CoopMap("FAF_Coop_Operation_Ioz_Shavoh_Kael", 38, 4), + CoopMap("FAF_Coop_Operation_Trident", 39, 4), + CoopMap("FAF_Coop_Operation_Blockade", 40, 4), + CoopMap("FAF_Coop_Operation_Golden_Crystals", 41, 4), + CoopMap("FAF_Coop_Operation_Holy_Raid", 42, 4), + CoopMap("FAF_Coop_Operation_Tight_Spot", 45, 4), + CoopMap("FAF_Coop_Operation_Overlord_Surth_Velsok", 47, 4), + CoopMap("FAF_Coop_Operation_Rebels_Rest", 48, 4), + CoopMap("FAF_Coop_Operation_Red_Revenge", 49, 4), +) + +data class CoopMapDatabase( + val dryRun: Boolean +) : FafDatabase() { + fun getLatestVersion(map: CoopMap): Int { + createStatement().use { st -> + st.executeQuery("SELECT version FROM coop_map WHERE id=${map.mapId}") + .use { rs -> + if (!rs.next()) error("Map ${map.mapId} not found") + return rs.getInt(1) + } + } + } + + fun update(map: CoopMap, version: Int) { + val sql = """ + UPDATE coop_map + SET version=$version, + filename='maps/${map.zipName(version)}' + WHERE id=${map.mapId} + """.trimIndent() + + createStatement().use { it.executeUpdate(sql) } + } +} + +private fun processCoopMap( + db: CoopMapDatabase, + map: CoopMap, + simulate: Boolean, + gitDir: String, + mapsDir: String +) { + log.info("Processing $map") + + val tmp = Files.createTempDirectory("coop-map") + try { + Files.walk(Path.of(gitDir, map.folderName)).forEach { + val target = tmp.resolve(Path.of(gitDir, map.folderName).relativize(it)) + if (it.isDirectory()) target.createDirectories() + else it.copyTo(target) + } + + val files = tmp.walk().filter { it.isRegularFile() }.toList() + val currentVersion = db.getLatestVersion(map) + + val currentZip = Path.of(mapsDir, map.zipName(currentVersion)) + val tmpZip = tmp.resolve(map.zipName(currentVersion)) + + createZip(map, currentVersion, files, tmp, tmpZip) + + val changed = currentVersion == 0 || + !currentZip.exists() || + md5(currentZip) != md5(tmpZip) + + if (!changed) { + log.info("$map unchanged") + return + } + + val newVersion = currentVersion + 1 + log.info("$map updated → v$newVersion") + + if (!simulate) { + val 
finalZip = Path.of(mapsDir, map.zipName(newVersion)) + createZip(map, newVersion, files, tmp, finalZip) + db.update(map, newVersion) + } + } finally { + tmp.toFile().deleteRecursively() + } +} + +private fun createZip( + map: CoopMap, + version: Int, + files: List<Path>, + base: Path, + out: Path +) { + ZipArchiveOutputStream(out.toFile()).use { zip -> + zip.setMethod(ZipArchiveEntry.DEFLATED) + + files.forEach { file -> + val rel = base.relativize(file) + val entryPath = "/${map.folderName(version)}/$rel" + + val bytes = file.readText() + .replace( + "/maps/${map.folderName}/", + "/maps/${map.folderName(version)}/" + ).toByteArray() + + val entry = ZipArchiveEntry(entryPath).apply { + // Ensure deterministic times + setTime(FIXED_FILE_TIME) + setCreationTime(FIXED_FILE_TIME) + setLastModifiedTime(FIXED_FILE_TIME) + setLastAccessTime(FIXED_FILE_TIME) + + size = bytes.size.toLong() + } + + zip.putArchiveEntry(entry) + zip.write(bytes) + zip.closeArchiveEntry() + } + + zip.finish() + } +} + +private fun md5(path: Path): String { + val md = MessageDigest.getInstance("MD5") + md.update(path.readBytes()) + return md.digest().joinToString("") { "%02x".format(it) } +} + +fun main(args: Array<String>) { + Log.init() + + val MAP_DIR = System.getenv("MAP_DIR") ?: "/opt/faf/data/faf-coop-maps" + val PATCH_VERSION = System.getenv("PATCH_VERSION") ?: error("PATCH_VERSION required") + val REPO_URL = System.getenv("GIT_REPO_URL") ?: "https://github.com/FAForever/faf-coop-maps" + val GIT_REF = System.getenv("GIT_REF") ?: "v$PATCH_VERSION" + val WORKDIR = System.getenv("GIT_WORKDIR") ?: "/tmp/faf-coop-maps" + val DRYRUN = (System.getenv("DRY_RUN") ?: "false").lowercase() in listOf("1", "true", "yes") + + log.info("=== Kotlin Coop Map Deployer v{} ===", PATCH_VERSION) + + Files.createDirectories(Paths.get(MAP_DIR)) + + GitRepo( + workDir = Paths.get(WORKDIR), + repoUrl = REPO_URL, + gitRef = GIT_REF, + ).checkout() + + CoopMapDatabase(dryRun = DRYRUN).use { db -> + coopMaps.forEach { + try { + processCoopMap(db, it, DRYRUN, WORKDIR, MAP_DIR) + } catch (e: Exception) { + log.warn("Failed processing $it", e) + } + } + } +} diff --git a/apps/faf-legacy-deployment/scripts/Utils.kt b/apps/faf-legacy-deployment/scripts/Utils.kt new file mode 100644 index 00000000..8b0b9b5a --- /dev/null +++ b/apps/faf-legacy-deployment/scripts/Utils.kt @@ -0,0 +1,74 @@ +package com.faforever + +import ch.qos.logback.classic.Level +import ch.qos.logback.classic.Logger +import org.eclipse.jgit.api.Git +import org.slf4j.LoggerFactory +import java.nio.file.Files +import java.nio.file.Path +import java.sql.Connection +import java.sql.DriverManager +import java.sql.PreparedStatement +import java.sql.Statement + +object Log { + fun init() { + val level = System.getenv("LOG_LEVEL") ?: "INFO" + val root = LoggerFactory + .getLogger(Logger.ROOT_LOGGER_NAME) as Logger + root.level = Level.toLevel(level, Level.INFO) + } +} + +data class GitRepo( + val workDir: Path, + val repoUrl: String, + val gitRef: String, +) { + private val log = LoggerFactory.getLogger(GitRepo::class.simpleName) + + fun checkout(): Path { + if (Files.exists(workDir.resolve(".git"))) { + log.info("Repo exists — fetching and checking out $gitRef...") + Git.open(workDir.toFile()).use { git -> + git.fetch().call() + git.checkout().setName(gitRef).call() + } + } else { + log.info("Cloning repository $repoUrl") + Git.cloneRepository() + .setURI(repoUrl) + .setDirectory(workDir.toFile()) + .call() + log.info("Checking out $gitRef") + Git.open(workDir.toFile()).use { git -> 
git.checkout().setName(gitRef).call() + } + } + + return workDir + } +} + +abstract class FafDatabase : AutoCloseable { + private val host = System.getenv("DATABASE_HOST") ?: "localhost" + private val database = System.getenv("DATABASE_NAME") ?: "faf" + private val username = System.getenv("DATABASE_USERNAME") ?: "root" + private val password = System.getenv("DATABASE_PASSWORD") ?: "banana" + + private val connection: Connection = + DriverManager.getConnection( + "jdbc:mariadb://$host/$database?useSSL=false&serverTimezone=UTC", + username, + password + ) + + fun createStatement(): Statement = connection.createStatement() + + fun prepareStatement(sql: String): PreparedStatement = connection.prepareStatement(sql) + + override fun close() { + connection.close() + } +} + diff --git a/apps/faf-legacy-deployment/scripts/build.gradle.kts b/apps/faf-legacy-deployment/scripts/build.gradle.kts index 10f0c44b..de560733 100644 --- a/apps/faf-legacy-deployment/scripts/build.gradle.kts +++ b/apps/faf-legacy-deployment/scripts/build.gradle.kts @@ -12,11 +12,7 @@ dependencies { implementation("org.eclipse.jgit:org.eclipse.jgit:7.5.0.202512021534-r") implementation("org.apache.commons:commons-compress:1.28.0") implementation("org.slf4j:slf4j-api:2.0.13") - runtimeOnly("ch.qos.logback:logback-classic:1.5.23") -} - -application { - mainClass.set("CoopDeployerKt") // filename + Kt + implementation("ch.qos.logback:logback-classic:1.5.23") } // Use the root level for files @@ -24,4 +20,20 @@ sourceSets { main { kotlin.srcDirs(".") } +} + +tasks.register<JavaExec>("deployCoop") { + group = "application" + description = "Deploy coop" + + classpath = sourceSets.main.get().runtimeClasspath + mainClass.set("com.faforever.coopdeployer.CoopDeployerKt") +} + +tasks.register<JavaExec>("deployCoopMaps") { + group = "application" + description = "Deploy coop maps" + + classpath = sourceSets.main.get().runtimeClasspath + mainClass.set("com.faforever.coopmapdeployer.CoopMapDeployerKt") } \ No newline at end of file diff --git a/apps/faf-legacy-deployment/scripts/deploy-coop-maps.py b/apps/faf-legacy-deployment/scripts/deploy-coop-maps.py deleted file mode 100644 index b28badef..00000000 --- a/apps/faf-legacy-deployment/scripts/deploy-coop-maps.py +++ /dev/null @@ -1,381 +0,0 @@ -#!/usr/bin/python3 -# -*- coding: utf-8 -*- - -""" -clone: https://github.com/FAForever/faf-coop-maps - -FAF coop maps updater - -All default settings are setup for FAF production! -Override the directory settings for local testing.
-To get more help run - $ pipenv run patch-coop-maps -h - -Default usage: - $ pipenv run patch-coop-maps -s -""" -import argparse -import hashlib -import logging -import os -import shutil -import subprocess -import sys -import zipfile -from tempfile import TemporaryDirectory -from typing import NamedTuple, List - -import mysql.connector - -logger: logging.Logger = logging.getLogger() -logger.setLevel(logging.DEBUG) - -fixed_file_timestamp = 1078100502 # 2004-03-01T00:21:42Z - - -db_config = { - "host": os.getenv("DATABASE_HOST", "localhost"), - "user": os.getenv("DATABASE_USERNAME", "root"), - "password": os.getenv("DATABASE_PASSWORD", "banana"), - "database": os.getenv("DATABASE_NAME", "faf_lobby"), -} - - -def get_db_connection(): - """Create and return a MySQL connection.""" - try: - conn = mysql.connector.connect(**db_config) - if conn.is_connected(): - logger.debug(f"Connected to MySQL at {db_config['host']}") - return conn - except Error as e: - logger.error(f"MySQL connection failed: {e}") - sys.exit(1) - - -def run_sql(conn, sql: str) -> str: - """ - Run an SQL query directly on the MySQL database instead of via Docker. - Returns output in a string format similar to the old implementation. - """ - logger.debug(f"Executing SQL query:\n{sql}") - try: - with conn.cursor() as cursor: - cursor.execute(sql) - - # If it's a SELECT query, fetch and format results - if sql.strip().lower().startswith("select"): - rows = cursor.fetchall() - column_names = [desc[0] for desc in cursor.description] - # Simulate the Docker mysql CLI tabular text output - lines = ["\t".join(column_names)] - for row in rows: - lines.append("\t".join(str(x) for x in row)) - result = "\n".join(lines) - else: - conn.commit() - result = "Query OK" - - logger.debug(f"SQL result:\n{result}") - return result - - except Error as e: - logger.error(f"SQL execution failed: {e}") - sys.exit(1) - - -class CoopMap(NamedTuple): - folder_name: str - map_id: int - map_type: int - - def build_zip_filename(self, version: int) -> str: - return f"{self.folder_name.lower()}.v{version:04d}.zip" - - def build_folder_name(self, version: int) -> str: - return f"{self.folder_name.lower()}.v{version:04d}" - - -# Coop maps are in db table `coop_map` -coop_maps: List[CoopMap] = [ - # Forged Alliance missions - CoopMap("X1CA_Coop_001", 1, 0), - CoopMap("X1CA_Coop_002", 3, 0), - CoopMap("X1CA_Coop_003", 4, 0), - CoopMap("X1CA_Coop_004", 5, 0), - CoopMap("X1CA_Coop_005", 6, 0), - CoopMap("X1CA_Coop_006", 7, 0), - - # Vanilla Aeon missions - CoopMap("SCCA_Coop_A01", 8, 1), - CoopMap("SCCA_Coop_A02", 9, 1), - CoopMap("SCCA_Coop_A03", 10, 1), - CoopMap("SCCA_Coop_A04", 11, 1), - CoopMap("SCCA_Coop_A05", 12, 1), - CoopMap("SCCA_Coop_A06", 13, 1), - - # Vanilla Cybran missions - CoopMap("SCCA_Coop_R01", 20, 2), - CoopMap("SCCA_Coop_R02", 21, 2), - CoopMap("SCCA_Coop_R03", 22, 2), - CoopMap("SCCA_Coop_R04", 23, 2), - CoopMap("SCCA_Coop_R05", 24, 2), - CoopMap("SCCA_Coop_R06", 25, 2), - - # Vanilla UEF missions - CoopMap("SCCA_Coop_E01", 14, 3), - CoopMap("SCCA_Coop_E02", 15, 3), - CoopMap("SCCA_Coop_E03", 16, 3), - CoopMap("SCCA_Coop_E04", 17, 3), - CoopMap("SCCA_Coop_E05", 18, 3), - CoopMap("SCCA_Coop_E06", 19, 3), - - # Custom missions - CoopMap("FAF_Coop_Prothyon_16", 26, 4), - CoopMap("FAF_Coop_Fort_Clarke_Assault", 27, 4), - CoopMap("FAF_Coop_Theta_Civilian_Rescue", 28, 4), - CoopMap("FAF_Coop_Novax_Station_Assault", 31, 4), - CoopMap("FAF_Coop_Operation_Tha_Atha_Aez", 32, 4), - CoopMap("FAF_Coop_Havens_Invasion", 33, 4), - 
CoopMap("FAF_Coop_Operation_Rescue", 35, 4), - CoopMap("FAF_Coop_Operation_Uhthe_Thuum_QAI", 36, 4), - CoopMap("FAF_Coop_Operation_Yath_Aez", 37, 4), - CoopMap("FAF_Coop_Operation_Ioz_Shavoh_Kael", 38, 4), - CoopMap("FAF_Coop_Operation_Trident", 39, 4), - CoopMap("FAF_Coop_Operation_Blockade", 40, 4), - CoopMap("FAF_Coop_Operation_Golden_Crystals", 41, 4), - CoopMap("FAF_Coop_Operation_Holy_Raid", 42, 4), - CoopMap("FAF_Coop_Operation_Tight_Spot", 45, 4), - CoopMap("FAF_Coop_Operation_Overlord_Surth_Velsok", 47, 4), - CoopMap("FAF_Coop_Operation_Rebel's_Rest", 48, 4), - CoopMap("FAF_Coop_Operation_Red_Revenge", 49, 4), -] - -def fix_file_timestamps(files: List[str]) -> None: - for file in files: - logger.debug(f"Fixing timestamp in {file}") - os.utime(file, (fixed_file_timestamp, fixed_file_timestamp)) - - -def fix_folder_paths(folder_name: str, files: List[str], new_version: int) -> None: - old_maps_lua_path = f"/maps/{folder_name}/" - new_maps_lua_path = f"/maps/{folder_name.lower()}.v{new_version:04d}/" - - for file in files: - logger.debug(f"Fixing lua folder path in {file}: '{old_maps_lua_path}' -> '{new_maps_lua_path}'") - - with open(file, "rb") as file_handler: - data = file_handler.read() - data = data.replace(old_maps_lua_path.encode(), new_maps_lua_path.encode()) - - with open(file, "wb") as file_handler: - file_handler.seek(0) - file_handler.write(data) - - -def get_latest_map_version(coop_map: CoopMap) -> int: - logger.debug(f"Fetching latest map version for coop map {coop_map}") - - query = f""" - SELECT version FROM coop_map WHERE id = {coop_map.map_id}; - """ - result = run_sql(query).split("\n") - assert len(result) == 3, f"Mysql returned wrong result! Either map id {coop_map.map_id} is not in table coop_map" \ - f" or the where clause is wrong. 
Result: " + "\n".join(result) - return int(result[1]) - - -def new_file_is_different(old_file_name: str, new_file_name: str) -> bool: - old_file_md5 = calc_md5(old_file_name) - new_file_md5 = calc_md5(new_file_name) - - logger.debug(f"MD5 hash of {old_file_name} is: {old_file_md5}") - logger.debug(f"MD5 hash of {new_file_name} is: {new_file_md5}") - - return old_file_md5 != new_file_md5 - - -def update_database(coop_map: CoopMap, new_version: int) -> None: - logger.debug(f"Updating coop map {coop_map} in database to version {new_version}") - - query = f""" - UPDATE coop_map - SET version = {new_version}, filename = "maps/{coop_map.build_zip_filename(new_version)}" - WHERE id = {coop_map.map_id} - """ - run_sql(query) - - -def copytree(src, dst, symlinks=False, ignore=None): - """ - Reason for that method is because shutil.copytree will raise exception on existing - temporary directory - """ - - for item in os.listdir(src): - s = os.path.join(src, item) - d = os.path.join(dst, item) - if os.path.isdir(s): - shutil.copytree(s, d, symlinks, ignore) - else: - shutil.copy2(s, d) - - -def create_zip_package(coop_map: CoopMap, version: int, files: List[str], tmp_folder_path: str, zip_file_path: str): - fix_folder_paths(coop_map.folder_name, files, version) - fix_file_timestamps(files) - with zipfile.ZipFile(zip_file_path, 'w', zipfile.ZIP_BZIP2) as zip_file: - for path in files: - zip_file.write(path, arcname=f"/{coop_map.build_folder_name(version)}/{os.path.relpath(path, tmp_folder_path)}") - - -def process_coop_map(coop_map: CoopMap, simulate: bool, git_directory:str, coop_maps_path: str): - logger.info(f"Processing: {coop_map}") - - temp_dir = TemporaryDirectory() - copytree(os.path.join(git_directory, coop_map.folder_name), temp_dir.name) - processing_files = [] - for root, dirs, files in os.walk(temp_dir.name): - for f in files: - processing_files.append(os.path.relpath(os.path.join(root, f), temp_dir.name)) - - logger.debug(f"Files to process in {coop_map}: {processing_files}") - current_version = get_latest_map_version(coop_map) - current_file_path = os.path.join(coop_maps_path, coop_map.build_zip_filename(current_version)) - zip_file_path = os.path.join(temp_dir.name, coop_map.build_zip_filename(current_version)) - create_zip_package(coop_map, current_version, processing_files, temp_dir.name, zip_file_path) - if current_version == 0 or new_file_is_different(current_file_path, zip_file_path): - new_version = current_version + 1 - - if current_version == 0: - logger.info(f"{coop_map} first upload. New version: {new_version}") - else: - logger.info(f"{coop_map} has changed. 
New version: {new_version}") - - if not simulate: - temp_dir.cleanup() - temp_dir = TemporaryDirectory() - copytree(os.path.join(git_directory, coop_map.folder_name), temp_dir.name) - - zip_file_path = os.path.join(coop_maps_path, coop_map.build_zip_filename(new_version)) - create_zip_package(coop_map, new_version, processing_files, temp_dir.name, zip_file_path) - - update_database(coop_map, new_version) - else: - logger.info(f"Updating database skipped due to simulation") - else: - logger.info(f"{coop_map} remains unchanged") - temp_dir.cleanup() - - -def calc_md5(filename: str) -> str: - """ - Calculate the MD5 hash of a file - """ - hash_md5 = hashlib.md5() - with open(filename, "rb") as f: - for chunk in iter(lambda: f.read(4096), b""): - hash_md5.update(chunk) - return hash_md5.hexdigest() - - -def run_checked_shell(cmd: List[str]) -> subprocess.CompletedProcess: - """ - Runs a command as a shell process and checks for success - Output is captured in the result object - :param cmd: command to run - :return: CompletedProcess of the execution - """ - logger.debug("Run shell command: {cmd}".format(cmd=cmd)) - return subprocess.run(cmd, check=True, stdout=subprocess.PIPE) - - -def run_sql(sql: str, container: str = "faf-db", database: str = "faf_lobby") -> str: - - """ - Run a sql-query against the faf-db in the docker container - :param database: name of the database where to run the query - :param container: name of the docker container where to run the query - :param sql: the sql-query to run - :return: the query output as string - """ - try: - sql_text_result = run_checked_shell( - ["docker", "exec", "-u", "root", container, "mysql", database, "-e", sql] - ).stdout.decode() # type: str - logger.debug(f"SQL output >>> \n{sql_text_result}<<<") - return sql_text_result - except subprocess.CalledProcessError as e: - logger.error(f"""Executing sql query failed: {sql}\n\t\tError message: {str(e)}""") - exit(1) - - -def git_checkout(path: str, tag: str) -> None: - """ - Checkout a git tag of the git repository. This requires the repo to be checked out in the path folder! 
- - :param path: the path of the git repository to checkout - :param tag: version of the git tag (full name) - :return: nothing - """ - cwd = os.getcwd() - os.chdir(path) - logger.debug(f"Git checkout from path {path}") - - try: - run_checked_shell(["git", "fetch"]) - run_checked_shell(["git", "checkout", tag]) - except subprocess.CalledProcessError as e: - logger.error(f"git checkout failed - please check the error message: {e.stderr}") - exit(1) - finally: - os.chdir(cwd) - - -def create_zip(content: List[str], relative_to: str, output_file: str) -> None: - logger.debug(f"Zipping files to file `{output_file}`: {content}") - - with zipfile.ZipFile(output_file, 'w', zipfile.ZIP_DEFLATED) as zip_file: - for path in content: - if os.path.isdir(path): - cwd = os.getcwd() - os.chdir(path) - - for root, dirs, files in os.walk(path): - for next_file in files: - file_path = os.path.join(root, next_file) - zip_file.write(file_path, os.path.relpath(file_path, relative_to)) - - os.chdir(cwd) - else: - zip_file.write(path, os.path.relpath(path, relative_to)) - - -if __name__ == "__main__": - # Setting up logger - stream_handler = logging.StreamHandler(sys.stdout) - stream_handler.setFormatter(logging.Formatter('%(levelname)-5s - %(message)s')) - logger.addHandler(stream_handler) - - # Setting up CLI arguments - parser = argparse.ArgumentParser(description=__doc__) - - parser.add_argument("version", help="the git tag name of the version") - parser.add_argument("-s", "--simulate", dest="simulate", action="store_true", default=False, - help="only runs a simulation without updating the database") - parser.add_argument("--git-directory", dest="git_directory", action="store", - default="/opt/featured-mods/faf-coop-maps", - help="base directory of the faf-coop-maps repository") - parser.add_argument("--maps-directory", dest="coop_maps_path", action="store", - default="/opt/faf/data/maps", - help="directory of the coop map files (content server)") - - args = parser.parse_args() - - git_checkout(args.git_directory, args.version) - - for coop_map in coop_maps: - try: - process_coop_map(coop_map, args.simulate, args.git_directory, args.coop_maps_path) - except Exception as error: - logger.warning(f"Unable to parse {coop_map}", exc_info=True) diff --git a/apps/faf-legacy-deployment/templates/config.yaml b/apps/faf-legacy-deployment/templates/config.yaml new file mode 100644 index 00000000..4c282829 --- /dev/null +++ b/apps/faf-legacy-deployment/templates/config.yaml @@ -0,0 +1,9 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: faf-legacy-deployment + labels: + app: faf-legacy-deployment +data: + DATABASE_HOST: "mariadb" + DATABASE_NAME: "faf_lobby" diff --git a/apps/faf-legacy-deployment/templates/deploy-coop-maps.yaml b/apps/faf-legacy-deployment/templates/deploy-coop-maps.yaml index f1629a38..ce877034 100644 --- a/apps/faf-legacy-deployment/templates/deploy-coop-maps.yaml +++ b/apps/faf-legacy-deployment/templates/deploy-coop-maps.yaml @@ -1,18 +1,3 @@ -apiVersion: v1 -kind: ConfigMap -metadata: - name: faf-deploy-coop-maps - labels: - app: faf-deploy-coop-maps -data: - PATCH_VERSION: "65" - DATABASE_HOST: "mariadb" - DATABASE_NAME: "faf_lobby" - "deploy-coop-maps.py": |- -{{ tpl ( .Files.Get "scripts/deploy-coop-maps.py" ) . 
| indent 4 }} - ---- - kind: CronJob apiVersion: batch/v1 metadata: @@ -35,22 +20,34 @@ spec: template: spec: containers: - - image: python:3.13 + - image: gradle:9.2-jdk21 imagePullPolicy: Always - name: faf-coop-deployment + name: faf-deploy-coop + workingDir: /workspace + env: + - name: PATCH_VERSION + value: "9.0.2" envFrom: - configMapRef: - name: faf-deploy-coop-maps + name: faf-legacy-deployment - secretRef: name: faf-legacy-deployment - command: [ "sh" ] - args: [ "-c", "pip install mysql-connector-python && python3 /tmp/deploy-coop-maps.py" ] + command: + - "sh" + - "-c" + - "cp /scripts/* /workspace && gradle deployCoopMaps" + # We need to mount single files via subpath because Gradle breaks otherwise (symbolic link to read-only directory) volumeMounts: - - mountPath: /tmp/deploy-coop-maps.py - name: faf-deploy-coop-maps - subPath: "deploy-coop-maps.py" + - mountPath: /scripts + name: faf-deploy-scripts + - mountPath: /workspace/legacy-featured-mod-files + name: faf-featured-mods restartPolicy: Never volumes: - - name: faf-deploy-coop-maps + - name: faf-deploy-scripts configMap: - name: "faf-deploy-coop-maps" + name: "faf-deploy-scripts" + - name: faf-featured-mods + hostPath: + path: /opt/faf/data/legacy-featured-mod-files + type: Directory \ No newline at end of file diff --git a/apps/faf-legacy-deployment/templates/deploy-coop.yaml b/apps/faf-legacy-deployment/templates/deploy-coop.yaml index c4c7a668..ac01eb55 100644 --- a/apps/faf-legacy-deployment/templates/deploy-coop.yaml +++ b/apps/faf-legacy-deployment/templates/deploy-coop.yaml @@ -1,16 +1,3 @@ -apiVersion: v1 -kind: ConfigMap -metadata: - name: faf-deploy-coop - labels: - app: faf-deploy-coop -data: - PATCH_VERSION: "66" - DATABASE_HOST: "mariadb" - DATABASE_NAME: "faf_lobby" - ---- - kind: CronJob apiVersion: batch/v1 metadata: @@ -37,20 +24,22 @@ spec: imagePullPolicy: Always name: faf-deploy-coop workingDir: /workspace + env: + - name: PATCH_VERSION + value: "66" envFrom: - configMapRef: - name: faf-deploy-coop + name: faf-legacy-deployment - secretRef: name: faf-legacy-deployment - command: [ "gradle", "run" ] + command: + - "sh" + - "-c" + - "cp /scripts/* /workspace && gradle deployCoop" # We need to mount single files via subpath because Gradle breaks otherwise (symbolic link to read-only directory) volumeMounts: - - mountPath: /workspace/build.gradle.kts - name: faf-deploy-scripts - subPath: "build.gradle.kts" - - mountPath: /workspace/CoopDeployer.kt + - mountPath: /scripts name: faf-deploy-scripts - subPath: "CoopDeployer.kt" - mountPath: /workspace/legacy-featured-mod-files name: faf-featured-mods restartPolicy: Never diff --git a/apps/faf-lobby-server/config/config.yaml b/apps/faf-lobby-server/config/config.yaml index 5a5679e6..77514dc2 100644 --- a/apps/faf-lobby-server/config/config.yaml +++ b/apps/faf-lobby-server/config/config.yaml @@ -1,5 +1,5 @@ CONFIGURATION_REFRESH_TIME: 60 -LOG_LEVEL: "INFO" +LOG_LEVEL: "DEBUG" LISTEN: - ADDRESS: @@ -40,6 +40,9 @@ NEWBIE_TIME_BONUS: .25 MAXIMUM_NEWBIE_TIME_BONUS: 3.0 MINORITY_BONUS: 1 +LADDER_TOP_PLAYER_SEARCH_EXPANSION_MAX: 1.0 +LADDER_TOP_PLAYER_SEARCH_EXPANSION_STEP: 1.0 + QUEUE_POP_TIME_MAX: 90 # LADDER_VIOLATIONS_ENABLED: false \ No newline at end of file diff --git a/apps/faf-replay-server/config/config.yaml b/apps/faf-replay-server/config/config.yaml index c46329c7..8c15c806 100644 --- a/apps/faf-replay-server/config/config.yaml +++ b/apps/faf-replay-server/config/config.yaml @@ -17,7 +17,7 @@ storage: replay: forced_timeout_s: 18000 
time_with_zero_writers_to_end_replay_s: 30 - delay_s: 300 + delay_s: 30 update_interval_s: 1 merge_quorum_size: 2 - stream_comparison_distance_b: 4096 \ No newline at end of file + stream_comparison_distance_b: 4096 diff --git a/apps/faf-user-service/templates/deployment.yaml b/apps/faf-user-service/templates/deployment.yaml index 213ef893..e0de6358 100644 --- a/apps/faf-user-service/templates/deployment.yaml +++ b/apps/faf-user-service/templates/deployment.yaml @@ -40,6 +40,9 @@ spec: limits: memory: 10Gi cpu: 3000m + requests: + memory: 2Gi + cpu: 1000m startupProbe: httpGet: port: 8080 diff --git a/apps/nodebb/templates/local-secret.yaml b/apps/nodebb/templates/local-secret.yaml index a580660a..49050209 100644 --- a/apps/nodebb/templates/local-secret.yaml +++ b/apps/nodebb/templates/local-secret.yaml @@ -9,7 +9,7 @@ stringData: OAUTH_SECRET: "banana" "config.json": | { - "url": "https://forum.localhost", + "url": "https://forum.{{ .Values.baseDomain }}", "secret": "banana", "database": "mongo", "port": 4567, diff --git a/apps/ory-hydra/templates/init-clients.yaml b/apps/ory-hydra/templates/init-clients.yaml index 6e99a8b2..bfb76466 100644 --- a/apps/ory-hydra/templates/init-clients.yaml +++ b/apps/ory-hydra/templates/init-clients.yaml @@ -66,11 +66,12 @@ spec: --policy-uri "{{ .policyUri }}" \ {{- end }} {{- if .tokenEndpointAuthMethod }} - --token-endpoint-auth-method "{{ .tokenEndpointAuthMethod }}" + --token-endpoint-auth-method "{{ .tokenEndpointAuthMethod }}" \ {{- end }} {{- if .owner }} - --owner "{{ .owner }}" + --owner "{{ .owner }}" \ {{- end }} + ; else echo "Client {{ .id }} already exists, skipping." fi diff --git a/cluster/namespaces.yaml b/cluster/namespaces.yaml index ac703faa..5b72ac64 100644 --- a/cluster/namespaces.yaml +++ b/cluster/namespaces.yaml @@ -13,4 +13,10 @@ metadata: apiVersion: v1 kind: Namespace metadata: - name: faf-ops \ No newline at end of file + name: faf-ops + +--- +apiVersion: v1 +kind: Namespace +metadata: + name: traefik \ No newline at end of file diff --git a/cluster/storage/values.yaml b/cluster/storage/values.yaml index 374e8dd4..5abc8e17 100644 --- a/cluster/storage/values.yaml +++ b/cluster/storage/values.yaml @@ -24,6 +24,12 @@ managedStorages: size: 500Gi pvc: namespace: faf-apps + - pv: + name: faf-replays-old + folderName: replays-old + size: 500Gi + pvc: + namespace: faf-apps - pv: name: faf-maps folderName: maps diff --git a/cluster/telepresence/Chart.yaml b/cluster/telepresence/Chart.yaml new file mode 100644 index 00000000..34f52f1e --- /dev/null +++ b/cluster/telepresence/Chart.yaml @@ -0,0 +1,7 @@ +apiVersion: v2 +name: telepresence +version: 1.0.0 +dependencies: + - name: telepresence-oss + version: 2.25.1 + repository: oci://ghcr.io/telepresenceio \ No newline at end of file diff --git a/cluster/telepresence/values.yaml b/cluster/telepresence/values.yaml new file mode 100644 index 00000000..72fa2a8d --- /dev/null +++ b/cluster/telepresence/values.yaml @@ -0,0 +1,3 @@ +namespace: telepresence +telepresence: + releaseName: traffic-manager \ No newline at end of file diff --git a/cluster/traefik/values-local.yaml b/cluster/traefik/values-local.yaml index c72b3237..a1d09c57 100644 --- a/cluster/traefik/values-local.yaml +++ b/cluster/traefik/values-local.yaml @@ -2,13 +2,20 @@ traefik: deployment: enabled: true + ingressRoute: + dashboard: + enabled: true + matchRule: Host(`traefik.faforever.localhost`) + entryPoints: ["websecure", "web"] + additionalArguments: - "--entrypoints.websecure.http.encodequerysemicolons=true" + - 
"--entrypoints.web.http.encodequerysemicolons=true" updateStrategy: type: Recreate - hostNetwork: true + hostNetwork: false ports: metrics: @@ -17,11 +24,9 @@ traefik: port: 9101 web: - redirections: - entryPoint: - to: websecure - scheme: https - permanent: true + transport: + respondingTimeouts: + readTimeout: 60 websecure: transport: @@ -33,7 +38,7 @@ traefik: # HostNetwork service: - enabled: false + enabled: true logs: general: @@ -43,3 +48,9 @@ traefik: # By default, the level is set to ERROR. # -- Alternative logging levels are DEBUG, PANIC, FATAL, ERROR, WARN, and INFO. level: INFO + access: + enabled: true + + providers: + kubernetesCRD: + allowExternalNameServices: true diff --git a/config/local.yaml b/config/local.yaml index 18750bbe..26d0e9c1 100644 --- a/config/local.yaml +++ b/config/local.yaml @@ -1,6 +1,6 @@ hostName: "localhost" environment: "local" -baseDomain: "localhost" +baseDomain: "faforever.localhost" infisical-secret: enabled: false traefik: {} diff --git a/infra/mariadb/values.yaml b/infra/mariadb/values.yaml index 0845f456..cff135f2 100644 --- a/infra/mariadb/values.yaml +++ b/infra/mariadb/values.yaml @@ -50,6 +50,13 @@ databasesAndUsers: usernameKey: DB_LOGIN passwordKey: DB_PASSWORD + # Icebreaker database + - configMapRef: faf-icebreaker + secretRef: faf-icebreaker + databaseKey: DB_NAME + usernameKey: DB_USERNAME + passwordKey: DB_PASSWORD + # Others - configMapRef: wordpress secretRef: wordpress diff --git a/infra/postgres/values.yaml b/infra/postgres/values.yaml index 276ef0d5..6dd76aec 100644 --- a/infra/postgres/values.yaml +++ b/infra/postgres/values.yaml @@ -1,6 +1,6 @@ image: repository: "postgres" - tag: "17.6-bookworm" + tag: "18.1-bookworm" upgrade-image: repository: "pgautoupgrade/pgautoupgrade" diff --git a/ops/monitoring/Chart.yaml b/ops/monitoring/Chart.yaml index 541e70ff..b48a5be9 100644 --- a/ops/monitoring/Chart.yaml +++ b/ops/monitoring/Chart.yaml @@ -5,9 +5,12 @@ dependencies: - name: victoria-metrics-k8s-stack version: 0.60.1 repository: https://victoriametrics.github.io/helm-charts/ -- name: promtail - version: 6.17.0 - repository: https://grafana.github.io/helm-charts - name: loki version: 6.40.0 repository: https://grafana.github.io/helm-charts +- name: k8s-monitoring + version: 3.5.1 + repository: https://grafana.github.io/helm-charts +- name: k8s-monitoring + version: 3.5.1 + repository: https://grafana.github.io/helm-charts diff --git a/ops/monitoring/values-prod.yaml b/ops/monitoring/values-prod.yaml new file mode 100644 index 00000000..063295f5 --- /dev/null +++ b/ops/monitoring/values-prod.yaml @@ -0,0 +1,3 @@ +k8s-monitoring: + cluster: + name: faforever-com \ No newline at end of file diff --git a/ops/monitoring/values-test.yaml b/ops/monitoring/values-test.yaml new file mode 100644 index 00000000..4bce1a76 --- /dev/null +++ b/ops/monitoring/values-test.yaml @@ -0,0 +1,3 @@ +k8s-monitoring: + cluster: + name: faforever-xyz \ No newline at end of file diff --git a/ops/monitoring/values.yaml b/ops/monitoring/values.yaml index 05ce39f8..1113c317 100644 --- a/ops/monitoring/values.yaml +++ b/ops/monitoring/values.yaml @@ -94,8 +94,6 @@ victoria-metrics-k8s-stack: loki: deploymentMode: SingleBinary - singleBinary: - replicas: 1 # Disable Simple Scalable read: @@ -131,9 +129,6 @@ loki: limits_config: retention_period: 168h - limits_config: - retention_period: 168h - # We know it's working test: enabled: false @@ -145,7 +140,30 @@ loki: chunksCache: enabled: false -promtail: - config: - clients: - - url: 
http://monitoring-loki-gateway/loki/api/v1/push + # Disable anti-affinity + singleBinary: + replicas: 1 + affinity: null + + gateway: + affinity: null + + +k8s-monitoring: + # Where Alloy pushes data to + # We can add our own Loki, Thanos... + destinations: + - name: local-loki + type: loki + url: http://monitoring-loki-gateway/loki/api/v1/push + + # We are using 1% of this chart, just logs for now, as it implements lots of things that vm-stack provides + # We could switch to this + vmsingle + + # Collectors + alloy-logs: + enabled: true + + # Features + podLogs: + enabled: true \ No newline at end of file diff --git a/tilt/helm/host-proxy/Chart.yaml b/tilt/helm/host-proxy/Chart.yaml new file mode 100644 index 00000000..4e45b836 --- /dev/null +++ b/tilt/helm/host-proxy/Chart.yaml @@ -0,0 +1,3 @@ +apiVersion: v2 +name: host-proxy +version: 1.0.0 \ No newline at end of file diff --git a/tilt/helm/host-proxy/templates/endpoint.yaml b/tilt/helm/host-proxy/templates/endpoint.yaml new file mode 100644 index 00000000..12abf145 --- /dev/null +++ b/tilt/helm/host-proxy/templates/endpoint.yaml @@ -0,0 +1,11 @@ +{{- if .Values.hostIP }} +apiVersion: v1 +kind: Endpoints +metadata: + name: {{.Values.name}} +subsets: + - addresses: + - ip: {{.Values.hostIP}} + ports: + - port: {{.Values.port}} +{{- end }} \ No newline at end of file diff --git a/tilt/helm/host-proxy/templates/service.yaml b/tilt/helm/host-proxy/templates/service.yaml new file mode 100644 index 00000000..73efa2ac --- /dev/null +++ b/tilt/helm/host-proxy/templates/service.yaml @@ -0,0 +1,15 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{.Values.name}} +spec: +{{- if .Values.hostIP }} + clusterIP: None + ports: + - protocol: "TCP" + port: {{.Values.port}} + targetPort: {{.Values.port}} +{{ else }} + type: ExternalName + externalName: {{.Values.hostname}} +{{- end }} \ No newline at end of file diff --git a/tilt/scripts/helm-with-cache.sh b/tilt/scripts/helm-with-cache.sh old mode 100644 new mode 100755 index 68c7f851..f5c86efe --- a/tilt/scripts/helm-with-cache.sh +++ b/tilt/scripts/helm-with-cache.sh @@ -1,10 +1,9 @@ #!/bin/bash -cache_dir=$1 +chart_cache_dir=$1 shift chart=$1 -chart_cache_dir="$cache_dir/$chart" mkdir -p "$chart_cache_dir" diff --git a/tilt/scripts/print-hosts.sh b/tilt/scripts/print-hosts.sh new file mode 100755 index 00000000..7cb926fb --- /dev/null +++ b/tilt/scripts/print-hosts.sh @@ -0,0 +1 @@ +kubectl get IngressRoute -A -o yaml | grep -Po 'Host\(`(.*?)`\)' | sed 's/^.\{6\}\(.*\).\{2\}$/127.0.0.1 \1/' | uniq \ No newline at end of file diff --git a/tilt/scripts/update-faf-featured-mod.sh b/tilt/scripts/update-faf-featured-mod.sh new file mode 100755 index 00000000..f3df8cb8 --- /dev/null +++ b/tilt/scripts/update-faf-featured-mod.sh @@ -0,0 +1,14 @@ +#!/bin/bash + +faf_data_dir=$1 +init_file="$faf_data_dir/bin/init_faf.lua" +init_file_md5sum=($(md5sum $init_file)) + +mariadb_exec() { + kubectl exec --namespace=faf-infra statefulset/mariadb -- mariadb --host=mariadb --user=root --password="banana" faf_lobby "$@" +} + +mariadb_exec -e "TRUNCATE updates_faf_files;" +mariadb_exec -e "TRUNCATE updates_faf;" +mariadb_exec -e "INSERT INTO updates_faf (id, filename, path) values (1, \"init_faf.lua\", \"bin\");" +mariadb_exec -e "INSERT INTO updates_faf_files (fileId, version, name, md5) values (1, 1, \"init_faf_1.lua\", \"$init_file_md5sum\");" \ No newline at end of file
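For reference, a minimal usage sketch of the new update-faf-featured-mod.sh script above, assuming a kubectl context that can reach the local cluster's faf-infra namespace; the ~/.faforever path is only a placeholder and should match your own faf-data-dir:
# Hypothetical invocation; replace ~/.faforever with your local FAF data directory.
./tilt/scripts/update-faf-featured-mod.sh ~/.faforever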