Commit 8216bfa7 authored by Dominik Hebeler's avatar Dominik Hebeler
Browse files

Merge branch 'development' into 'master'

Development

See merge request !1502
parents 391361d3 d737fafe
......@@ -4,10 +4,20 @@ APP_LOG_LEVEL=debug
APP_KEY=
APP_URL=http://localhost
DB_CONNECTION=mysql
DB_HOST=mgdb
DB_PORT=3306
DB_DATABASE=metager
DB_USERNAME=metager
DB_PASSWORD="metager"
REDIS_RESULT_CONNECTION=default
REDIS_RESULT_CACHE_DURATION=60
BROADCAST_DRIVER=log
CACHE_DRIVER=file
SESSION_DRIVER=file
QUEUE_DRIVER=sync
QUEUE_CONNECTION=sync
REDIS_HOST=127.0.0.1
REDIS_PASSWORD=null
......
update(144.76.113.134):
tags:
- 144.76.113.134
variables:
DOCKER_HOST: "tcp://docker-dind.gitlab:2375"
POSTGRES_ENABLED: "false"
CODE_QUALITY_DISABLED: "true"
CONTAINER_SCANNING_DISABLED: "true"
DAST_DISABLED: "true"
DEPENDENCY_SCANNING_DISABLED: "true"
LICENSE_MANAGEMENT_DISABLED: "true"
PERFORMANCE_DISABLED: "true"
SAST_DISABLED: "true"
TEST_DISABLED: "true"
include:
- template: Jobs/Build.gitlab-ci.yml
- template: Jobs/Deploy.gitlab-ci.yml
stages:
- prepare
- build
- deploy # dummy stage to follow the template guidelines
- review
- dast
- staging
- canary
- development
- production
- incremental rollout 10%
- incremental rollout 25%
- incremental rollout 50%
- incremental rollout 100%
- performance
- cleanup
build:
services:
# Prepares the secret files that we cannot or don't want to share with public
prepare_secrets:
stage: prepare
image: alpine:latest
script:
- cp $ENVFILE .env
- cp $SUMAS config/sumas.json
- cp $SUMASEN config/sumasEn.json
- cp $BLACKLISTURL config/blacklistUrl.txt
- cp $BLACKLISTDOMAINS config/blacklistDomains.txt
- cp $ADBLACKLISTURL config/adBlacklistUrl.txt
- cp $ADBLACKLISTDOMAINS config/adBlacklistDomains.txt
- cp $SPAM config/spam.txt
- cp $USERSSEEDER database/seeds/UsersSeeder.php
- cp database/useragents.sqlite.example database/useragents.sqlite
- sed -i 's/^REDIS_PASSWORD=.*/REDIS_PASSWORD=null/g' .env
artifacts:
paths:
- .env
- config/sumas.json
- config/sumasEn.json
- config/blacklistUrl.txt
- config/blacklistDomains.txt
- config/adBlacklistUrl.txt
- config/adBlacklistDomains.txt
- config/spam.txt
- database/seeds/UsersSeeder.php
- database/useragents.sqlite
only:
- master@open-source/MetaGer
- branches
- tags
prepare_node:
stage: prepare
image: node:10
before_script:
# Check that build dependencies are available
- which composer
- which git
- which php
- which sqlite3
- npm install
script:
- sh build.sh
variables:
STAGE: production
update(metager2):
tags:
- metager2
- npm run prod
artifacts:
paths:
- public/js/
- public/css/
- public/mix-manifest.json
cache:
# Cache per Branch
key: "node-$CI_JOB_STAGE-$CI_COMMIT_REF_SLUG"
paths:
- node_modules
only:
- master@open-source/MetaGer
before_script:
# Check that build dependencies are available
- which composer
- which git
- which php
- which sqlite3
- branches
- tags
prepare_composer:
stage: prepare
image: prooph/composer:7.3
script:
- sh build.sh
- composer install
artifacts:
paths:
- vendor
cache:
key: "composer-$CI_JOB_STAGE-$CI_COMMIT_REF_SLUG"
paths:
- vendor
review:
variables:
STAGE: production
update(metager3.de):
tags:
- metager3
only:
- development@open-source/MetaGer
before_script:
# Check that build dependencies are available
- which composer
- which git
- which php
- which sqlite3
HELM_UPGRADE_VALUES_FILE: .gitlab/review-apps-values.yaml
ROLLOUT_RESOURCE_TYPE: deployment
except:
refs:
- master
- development
variables:
- $REVIEW_DISABLED
stop_review:
except:
refs:
- master
- development
variables:
- $REVIEW_DISABLED
.development: &development_template
extends: .auto-deploy
stage: development
script:
- sh build.sh
- auto-deploy check_kube_domain
- auto-deploy download_chart
- auto-deploy ensure_namespace
- auto-deploy initialize_tiller
- auto-deploy create_secret
- auto-deploy deploy
- auto-deploy delete canary
- auto-deploy delete rollout
- auto-deploy persist_environment_url
variables:
ADDITIONAL_HOSTS: "www.metager3.de"
HELM_UPGRADE_VALUES_FILE: .gitlab/development-values.yaml
ROLLOUT_RESOURCE_TYPE: deployment
environment:
name: development
url: https://metager3.de
artifacts:
paths: [environment_url.txt]
development:
<<: *development_template
only:
refs:
- development
kubernetes: active
except:
variables:
- $STAGING_ENABLED
- $CANARY_ENABLED
- $INCREMENTAL_ROLLOUT_ENABLED
- $INCREMENTAL_ROLLOUT_MODE
production:
variables:
STAGE: development
\ No newline at end of file
HELM_UPGRADE_EXTRA_ARGS: --set service.externalPort=80 --set service.internalPort=80 --set service.commonName= --set ingress.annotations.certmanager\.k8s\.io/cluster-issuer=letsencrypt-prod
ROLLOUT_RESOURCE_TYPE: deployment
environment:
url: https://metager.de
service:
externalPort: 80
internalPort: 80
ingress:
annotations:
certmanager.k8s.io/cluster-issuer: letsencrypt-prod
nginx.ingress.kubernetes.io/configuration-snippet: |
if ($host = "www.metager3.de") {
return 301 https://metager3.de$request_uri;
}
\ No newline at end of file
---
hpa:
enabled: false
ingress:
annotations:
kubernetes.io/tls-acme: "false"
nginx.ingress.kubernetes.io/ssl-redirect: "false"
tls:
enabled: false
service:
commonName: ""
externalPort: 80
internalPort: 80
FROM debian:buster
FROM nginx
RUN apt-get update && apt-get install -y \
composer \
php7.2 \
php-mbstring \
php7.2-xml\
php-zip \
php-gd \
php-sqlite3 \
php-mysql \
php-curl \
redis-server \
sqlite3 \
nodejs \
libpng-dev \
unzip \
npm
RUN npm install gulp -g
RUN apt -y update && apt -y install php-fpm \
ca-certificates \
cron \
zip \
php7.3-common \
php7.3-curl \
php7.3-mbstring \
php7.3-sqlite3 \
php7.3-mysql \
php7.3-xml \
php7.3-zip \
php7.3-redis \
php7.3-gd \
redis-server
COPY . /app
WORKDIR app
RUN mv config/sumas.xml.example config/sumas.xml && mv .env.example .env
RUN composer install --no-plugins --no-scripts
RUN npm install
RUN npm run dev
RUN sed -i 's/listen.owner = www-data/listen.owner = nginx/g' /etc/php/7.3/fpm/pool.d/www.conf && \
sed -i 's/listen.group = www-data/listen.group = nginx/g' /etc/php/7.3/fpm/pool.d/www.conf && \
sed -i 's/user = www-data/user = nginx/g' /etc/php/7.3/fpm/pool.d/www.conf && \
sed -i 's/group = www-data/group = nginx/g' /etc/php/7.3/fpm/pool.d/www.conf && \
sed -i 's/;cgi.fix_pathinfo=1/cgi.fix_pathinfo=0/g' /etc/php/7.3/fpm/php.ini && \
mkdir /html
RUN php artisan key:generate
# Set correct timezone
RUN ln -fs /usr/share/zoneinfo/Europe/Berlin /etc/localtime && dpkg-reconfigure -f noninteractive tzdata
CMD redis-server --daemonize yes && php artisan serve --host=0.0.0.0
# Add Cronjob for Laravel
RUN (crontab -l ; echo "* * * * * php /html/artisan schedule:run >> /dev/null 2>&1") | crontab
EXPOSE 8000
WORKDIR /html
EXPOSE 80
COPY config/nginx.conf /etc/nginx/nginx.conf
COPY config/nginx-default.conf /etc/nginx/conf.d/default.conf
COPY --chown=root:nginx . /html
CMD chown -R root:nginx storage/logs/metager bootstrap/cache && \
chmod -R g+w storage/logs/metager bootstrap/cache && \
/etc/init.d/cron start && \
/etc/init.d/php7.3-fpm start && \
/etc/init.d/nginx start && \
/etc/init.d/redis-server start && \
su -s /bin/bash -c 'php artisan requests:fetcher' nginx
<?php
namespace App;
use Illuminate\Support\Facades\Redis;
class CacheHelper
{
    /**
     * Queue a cache entry for asynchronous persistence.
     *
     * MetaGer uses a pretty slow harddrive for the configured cache,
     * so dedicated worker processes (the requests:cacher command) write
     * entries to disk in parallel. This method only enqueues the item
     * onto the shared Redis list; it never touches the cache store itself.
     *
     * @param string $key         cache key to store under
     * @param mixed  $value       value to cache
     * @param int    $timeSeconds lifetime of the cache entry in seconds
     * @return void
     */
    public static function put($key, $value, $timeSeconds)
    {
        $payload = serialize([
            'timeSeconds' => $timeSeconds,
            'key' => $key,
            'value' => $value,
        ]);
        // base64 keeps the serialized blob safe for transport through Redis.
        Redis::rpush(\App\Console\Commands\RequestCacher::CACHER_QUEUE, base64_encode($payload));
    }
}
<?php
namespace App\Console\Commands;
use Illuminate\Console\Command;
class CacheGC extends Command
{
    /**
     * The name and signature of the console command.
     *
     * @var string
     */
    protected $signature = 'cache:gc';

    /**
     * The console command description.
     *
     * @var string
     */
    protected $description = 'Cleans up every expired cache File';

    /**
     * Maximum age (seconds) of the lock file before it is considered stale.
     * Bug fix: previously a run that died before its finally-block left
     * "cache.gc" behind forever, permanently disabling garbage collection.
     */
    const LOCK_MAX_AGE_SECONDS = 3600;

    /**
     * Create a new command instance.
     *
     * @return void
     */
    public function __construct()
    {
        parent::__construct();
    }

    /**
     * Execute the console command.
     *
     * Sweeps the file cache directory: deletes every cache file whose
     * embedded expiry timestamp (the first 10 bytes of the file, as written
     * by Laravel's file cache store) lies in the past, and removes empty
     * sub-directories. A lock file prevents concurrent sweeps.
     *
     * @return mixed
     */
    public function handle()
    {
        $cachedir = storage_path('framework/cache');
        $lockfile = $cachedir . "/cache.gc";
        if (file_exists($lockfile)) {
            // Take over a stale lock left behind by a crashed run; a small
            // race between two takers is acceptable (worst case: one extra
            // concurrent sweep of idempotent deletes).
            $age = time() - (int) filemtime($lockfile);
            if ($age < self::LOCK_MAX_AGE_SECONDS) {
                return;
            }
            touch($lockfile); // refresh the lock's mtime and proceed
        } else {
            touch($lockfile);
        }
        try {
            foreach (new \DirectoryIterator($cachedir) as $fileInfo) {
                if ($fileInfo->isDot()) {
                    continue;
                }
                $file = $fileInfo->getPathname();
                $basename = basename($file);
                if (!is_dir($file) && $basename !== "cache.gc" && $basename !== ".gitignore") {
                    $fp = fopen($file, 'r');
                    if ($fp === false) {
                        // File vanished between listing and open (e.g. deleted
                        // by a cache write); skip instead of aborting the sweep.
                        continue;
                    }
                    $delete = false;
                    try {
                        // First 10 bytes hold the unix expiry timestamp.
                        $time = intval(fread($fp, 10));
                        if ($time < time()) {
                            $delete = true;
                        }
                    } finally {
                        fclose($fp);
                    }
                    if ($delete) {
                        unlink($file);
                    }
                } else if (is_dir($file)) {
                    // Delete directory if empty; rmdir() on a non-empty dir
                    // raises a warning which Laravel's error handler converts
                    // to an ErrorException — deliberately ignored here.
                    try {
                        rmdir($file);
                    } catch (\ErrorException $e) {
                    }
                }
            }
        } finally {
            unlink($lockfile);
        }
    }
}
<?php
namespace App\Console\Commands;
use Cache;
use Illuminate\Console\Command;
use Illuminate\Support\Facades\Redis;
class RequestCacher extends Command
{
    /**
     * The name and signature of the console command.
     *
     * @var string
     */
    protected $signature = 'requests:cacher';

    // Redis list that CacheHelper::put() pushes base64-encoded, serialized
    // cache items onto; this worker is the consumer side.
    const CACHER_QUEUE = 'cacher.queue';

    // Flipped to false by sig_handler() so the worker loop exits gracefully.
    protected $shouldRun = true;

    /**
     * The console command description.
     *
     * @var string
     */
    protected $description = 'Listens to a buffer of fetched search results and writes them to the filesystem cache.';

    /**
     * Create a new command instance.
     *
     * @return void
     */
    public function __construct()
    {
        parent::__construct();
    }

    /**
     * Execute the console command.
     *
     * Blocks on the Redis cacher queue and persists each dequeued item via
     * the configured Cache store until a termination signal arrives.
     *
     * @return mixed
     */
    public function handle()
    {
        // Async signals let the handler run mid-loop without explicit
        // pcntl_signal_dispatch() calls.
        pcntl_async_signals(true);
        pcntl_signal(SIGINT, [$this, "sig_handler"]);
        pcntl_signal(SIGTERM, [$this, "sig_handler"]);
        pcntl_signal(SIGHUP, [$this, "sig_handler"]);
        while ($this->shouldRun) {
            // 1s timeout so the loop re-checks $shouldRun even when idle.
            $cacheItem = Redis::blpop(self::CACHER_QUEUE, 1);
            if (!empty($cacheItem)) {
                // blpop returns [listName, payload]; payload is produced by
                // CacheHelper::put(). NOTE(review): unserialize() of queue
                // data is an object-injection risk if the Redis instance is
                // ever shared/untrusted — consider the 'allowed_classes'
                // option if cached values are known to be scalar/array only.
                $cacheItem = unserialize(base64_decode($cacheItem[1]));
                if (empty($cacheItem["value"])) {
                    // Marker so empty results are still cached (negative cache).
                    $cacheItem["value"] = "no-result";
                }
                Cache::put($cacheItem["key"], $cacheItem["value"], now()->addSeconds($cacheItem["timeSeconds"]));
            }
        }
    }

    /**
     * Signal handler: requests a graceful stop of the worker loop.
     *
     * @param int $sig received signal number (unused)
     */
    public function sig_handler($sig)
    {
        $this->shouldRun = false;
        echo ("Terminating Cacher Process\n");
    }
}
<?php
namespace App\Console\Commands;
use Artisan;
use Illuminate\Console\Command;
use Illuminate\Support\Facades\Redis;
use Log;
class RequestFetcher extends Command
{
/**
* The name and signature of the console command.
*
* @var string
*/
protected $signature = 'requests:fetcher';
/**
* The console command description.
*
* @var string
*/
protected $description = 'This commands fetches requests to the installed search engines';
// Flipped to false by the signal handler to stop the fetch loop.
protected $shouldRun = true;

// Shared curl multi handle used to run search-engine requests in parallel.
protected $multicurl = null;

// Optional HTTP proxy configuration, read from the environment.
// Bug fix: $proxyport was assigned in the constructor but never declared,
// creating a dynamic property (deprecated as of PHP 8.2).
protected $proxyhost, $proxyport, $proxyuser, $proxypassword;

/**
 * Create a new command instance.
 *
 * Initializes the curl multi handle and reads the proxy settings
 * (PROXY_HOST/PORT/USER/PASSWORD), defaulting each to "" when unset.
 *
 * @return void
 */
public function __construct()
{
    parent::__construct();
    $this->multicurl = curl_multi_init();
    $this->proxyhost = env("PROXY_HOST", "");
    $this->proxyport = env("PROXY_PORT", "");
    $this->proxyuser = env("PROXY_USER", "");
    $this->proxypassword = env("PROXY_PASSWORD", "");
}
/**
* Execute the console command.
*
* @return mixed
*/
public function handle()
{
$pids = [];
$pid = null;
for ($i = 0; $i < 5; $i++) {
$pid = \pcntl_fork();
$pids[] = $pid;
if ($pid === 0) {
break;
}
}
if ($pid === 0) {
Artisan::call('requests:cacher');
exit;
} else {
pcntl_async_signals(true);
pcntl_signal(SIGINT, [$this, "sig_handler"]);
pcntl_signal(SIGTERM, [$this, "sig_handler"]);
pcntl_signal(SIGHUP, [$this, "sig_handler"]);
}
try {
$blocking = false;
while ($this->shouldRun) {
$status = curl_multi_exec($this->multicurl, $active);
$currentJob = null;
if (!$blocking) {
$currentJob = Redis::lpop(\App\MetaGer::FETCHQUEUE_KEY);
} else {
$currentJob = Redis::blpop(\App\MetaGer::FETCHQUEUE_KEY, 1);
if (!empty($currentJob)) {
$currentJob = $currentJob[1];
}
}
if (!empty($currentJob)) {
$currentJob = json_decode($currentJob, true);
$ch = $this->getCurlHandle($currentJob);
curl_multi_add_handle($this->multicurl, $ch);
$blocking = false;
$active = true;
}
$answerRead = false;
while (($info = curl_multi_info_read($this->multicurl)) !== false) {
$answerRead = true;
$infos = curl_getinfo($info["handle"], CURLINFO_PRIVATE);
$infos = explode(";", $infos);
$resulthash = $infos[0];
$cacheDurationMinutes = intval($infos[1]);
$responseCode = curl_getinfo($info["handle"], CURLINFO_HTTP_CODE);
$body = "";
$error = curl_error($info["handle"]);
if (!empty($error)) {
Log::error($error);
}
if ($responseCode !== 200) {
Log::debug("Got responsecode " . $responseCode . " fetching \"" . curl_getinfo($info["handle"], CURLINFO_EFFECTIVE_URL) . "\n");
} else {
$body = \curl_multi_getcontent($info["handle"]);
}
Redis::pipeline(function ($pipe) use ($resulthash, $body, $cacheDurationMinutes) {
$pipe->set($resulthash, $body);
$pipe->expire($resulthash, 60);
$cacherItem = [
'timeSeconds' => $cacheDurationMinutes * 60,
'key' => $resulthash,
'value' => $body,