diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 3826bb59f22..e2359069e27 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,13 +1,20 @@ name: build -on: - push: - branches: [ master ] - pull_request: - branches: [ master ] +on: [ push, pull_request ] jobs: setup: runs-on: ubuntu-20.04 steps: + # Required for workflow triggers like the auto-label for failing PRs + - name: Save PR number + if: github.event_name == 'pull_request' + run: | + mkdir -p ./pr + echo ${{ github.event.number }} > ./pr/NR + - uses: actions/upload-artifact@v2 + if: github.event_name == 'pull_request' + with: + name: pr + path: pr/ # Commit branch/name extraction from: # https://github.community/t/accessing-commit-message-in-pull-request-event/17158/8 # @@ -26,6 +33,7 @@ jobs: echo "COMMIT_MESSAGE<<EOF" >> $GITHUB_ENV echo "$(git log --format=%B -n 1 HEAD)" >> $GITHUB_ENV echo "EOF" >> $GITHUB_ENV + echo "PREVIOUS_COMMIT=$(git log --format=%H -n 1 HEAD~1)" >> $GITHUB_ENV # In case of a pull_request event, the commit we care about is HEAD^2, that # is, the second parent of the pull request merge commit. # The current branch name is directly given by GITHUB_HEAD_REF @@ -33,9 +41,27 @@ jobs: if: github.event_name == 'pull_request' run: | echo "BRANCH_NAME=$GITHUB_HEAD_REF" >> $GITHUB_ENV + echo "TARGET_BRANCH_NAME=$(echo ${GITHUB_BASE_REF##*/})" >> $GITHUB_ENV echo "COMMIT_MESSAGE<<EOF" >> $GITHUB_ENV echo "$(git log --format=%B -n 1 HEAD^2)" >> $GITHUB_ENV echo "EOF" >> $GITHUB_ENV + echo "PREVIOUS_COMMIT=$(git log --format=%H -n 1 HEAD^2~1)" >> $GITHUB_ENV + - uses: actions/setup-python@v2 + with: + python-version: '2.7' + architecture: 'x64' + - name: Get all changes vs master + env: + PR_NUMBER: ${{ github.event.pull_request.number }} + run: | + echo "DIFF<<EOF" >> $GITHUB_ENV + echo "$(./toolset/github_actions/github_actions_diff.py)" >> $GITHUB_ENV + echo "EOF" >> $GITHUB_ENV + - name: Determine which (if any) tests need to be run + run: | + echo "RUN_TESTS<<EOF" >> $GITHUB_ENV + echo "$(grep -oP "github-actions-run-tests \K(.*)" <<< $DIFF || true)" >> $GITHUB_ENV + echo "EOF" >> $GITHUB_ENV - id: event_out name: Write event outputs run: | @@ -45,6 +71,8 @@ jobs: COMMIT_MESSAGE="${COMMIT_MESSAGE//$'\r'/'%0D'}" echo "::set-output name=commit_message::$COMMIT_MESSAGE" echo "::set-output name=branch_name::$BRANCH_NAME" + echo "::set-output name=target_branch_name::$TARGET_BRANCH_NAME" + echo "::set-output name=previous_commit::$PREVIOUS_COMMIT" - id: verify_out name: Write verify job matrix run: | @@ -53,15 +81,18 @@ jobs: # include: # - TESTLANG: {lang} # with a TESTLANG object in the include array for each language under frameworks - VERIFY_MATRIX=$(ls -1 frameworks | jq -R | jq -sc "{include: map({TESTLANG: .})}") + VERIFY_MATRIX=$(ls -1 frameworks | jq -Rc '.+"/" | select(inside(env.RUN_TESTS)) | rtrimstr("/")' | jq -sc '{include: map({TESTLANG: .})}') echo "::set-output name=verify_matrix::$VERIFY_MATRIX" outputs: commit_message: ${{ steps.event_out.outputs.commit_message }} branch_name: ${{ steps.event_out.outputs.branch_name }} + target_branch_name: ${{ steps.event_out.outputs.target_branch_name }} + previous_commit: ${{ steps.event_out.outputs.previous_commit }} verify_matrix: ${{ steps.verify_out.outputs.verify_matrix }} verify: needs: setup - if: ${{ !contains(needs.setup.outputs.commit_message, '[ci skip]') }} + # The matrix check is necessary because an empty job matrix is otherwise considered a workflow failure + if: ${{ !contains(needs.setup.outputs.commit_message, '[ci 
skip]') && contains(needs.setup.outputs.verify_matrix, 'TESTLANG') }} runs-on: ubuntu-18.04 strategy: matrix: ${{ fromJSON(needs.setup.outputs.verify_matrix) }} @@ -73,6 +104,8 @@ jobs: TESTDIR: ${{ matrix.TESTDIR }} COMMIT_MESSAGE: ${{ needs.setup.outputs.commit_message }} BRANCH_NAME: ${{ needs.setup.outputs.branch_name }} + TARGET_BRANCH_NAME: ${{ needs.setup.outputs.target_branch_name }} + PREVIOUS_COMMIT: ${{ needs.setup.outputs.previous_commit }} PR_NUMBER: ${{ github.event.pull_request.number }} steps: - uses: actions/checkout@v2 @@ -82,8 +115,7 @@ jobs: with: python-version: '2.7' architecture: 'x64' - - id: get_diff - name: Get all changes vs master + - name: Get all changes vs master # Runs github_actions_diff, with the the output accessible in later steps run: | # Write the result to env.DIFF for later steps diff --git a/.github/workflows/label-failing-pr.yml b/.github/workflows/label-failing-pr.yml new file mode 100644 index 00000000000..a29289a574f --- /dev/null +++ b/.github/workflows/label-failing-pr.yml @@ -0,0 +1,46 @@ +name: Label PR if failed +on: + workflow_run: + workflows: [ "build" ] + types: + - completed +jobs: + apply_label: + if: ${{ github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'failure' }} + runs-on: ubuntu-20.04 + steps: + - name: 'Download artifact' + uses: actions/github-script@v3.1.0 + with: + # scripts lightly modified from https://securitylab.github.com/research/github-actions-preventing-pwn-requests + script: | + var artifacts = await github.actions.listWorkflowRunArtifacts({ + owner: context.repo.owner, + repo: context.repo.repo, + run_id: ${{github.event.workflow_run.id }}, + }); + var matchArtifact = artifacts.data.artifacts.filter((artifact) => { + return artifact.name == "pr" + })[0]; + var download = await github.actions.downloadArtifact({ + owner: context.repo.owner, + repo: context.repo.repo, + artifact_id: matchArtifact.id, + archive_format: 'zip', + }); + var fs = require('fs'); + fs.writeFileSync('${{github.workspace}}/pr.zip', Buffer.from(download.data)); + - run: unzip pr.zip + - name: Label PR + uses: actions/github-script@v3 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + var fs = require('fs'); + var issue_number = Number(fs.readFileSync('./NR')); + await github.issues.addLabels({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: issue_number, + labels: ['PR: Please Update'] + }); diff --git a/frameworks/C++/cutelyst/build.sh b/frameworks/C++/cutelyst/build.sh index c961ef4cfb7..28f1b47bf6e 100755 --- a/frameworks/C++/cutelyst/build.sh +++ b/frameworks/C++/cutelyst/build.sh @@ -1,8 +1,8 @@ #!/bin/bash -export ASQL_VER=0.19.0 +export ASQL_VER=0.26.0 export CUTELEE_VER=5.3.0 -export CUTELYST_VER=2.13.0 +export CUTELYST_VER=2.14.0 apt update -qq && \ apt install -yqq --no-install-recommends \ diff --git a/frameworks/C++/cutelyst/src/CMakeLists.txt b/frameworks/C++/cutelyst/src/CMakeLists.txt index ba9768c1c27..55199452dec 100644 --- a/frameworks/C++/cutelyst/src/CMakeLists.txt +++ b/frameworks/C++/cutelyst/src/CMakeLists.txt @@ -2,9 +2,14 @@ cmake_minimum_required(VERSION 3.6.0 FATAL_ERROR) project(cutelyst_benchmarks LANGUAGES CXX) +cmake_policy(SET CMP0069 NEW) + find_package(Qt5 5.6.0 REQUIRED COMPONENTS Core Network Sql) find_package(ASqlQt5 0.15.0 REQUIRED) find_package(Cutelyst2Qt5 2.12 REQUIRED) +find_package(Cutelee5 REQUIRED) +find_package(PostgreSQL REQUIRED) +find_package(mimalloc 1.0) # Auto generate moc files set(CMAKE_AUTOMOC ON) @@ -53,10 
+58,18 @@ target_link_libraries(cutelyst_benchmarks ASqlQt5::Core ) -add_executable(cutelyst-benchmarks main.cpp) +add_executable(cutelyst-benchmarks ${cutelyst_benchmarks_SRCS} main.cpp) target_link_libraries(cutelyst-benchmarks + PUBLIC Cutelyst::Core Cutelyst::WSGI + Cutelyst::Utils::Sql + Cutelyst::View::Cutelee + Qt5::Core Qt5::Network - cutelyst_benchmarks + Qt5::Sql + ASqlQt5::Core ) +if (mimalloc_FOUND) + target_link_libraries(cutelyst-benchmarks PUBLIC mimalloc) +endif () diff --git a/frameworks/C++/cutelyst/src/databaseupdatestest.cpp b/frameworks/C++/cutelyst/src/databaseupdatestest.cpp index 6d60c10cd00..d50aeeeb0a5 100644 --- a/frameworks/C++/cutelyst/src/databaseupdatestest.cpp +++ b/frameworks/C++/cutelyst/src/databaseupdatestest.cpp @@ -12,6 +12,8 @@ #include #include +#include + DatabaseUpdatesTest::DatabaseUpdatesTest(QObject *parent) : Controller(parent) { @@ -41,14 +43,14 @@ void DatabaseUpdatesTest::updatep(Context *c) db.execPrepared(APreparedQueryLiteral("SELECT randomNumber, id FROM world WHERE id=$1"), {id}, [c, async] (AResult &result) { - if (Q_UNLIKELY(result.error() && !result.size())) { + if (Q_UNLIKELY(result.error() || !result.size())) { c->res()->setStatus(Response::InternalServerError); return; } }, c); db.execPrepared(APreparedQueryLiteral("UPDATE world SET randomNumber=$1 WHERE id=$2"), {randomNumber, id}, [c, async] (AResult &result) { - if (Q_UNLIKELY(result.error() && !result.size())) { + if (Q_UNLIKELY(result.error())) { c->res()->setStatus(Response::InternalServerError); return; } @@ -89,7 +91,7 @@ void DatabaseUpdatesTest::updateb(Context *c) db.execPrepared(APreparedQueryLiteral("SELECT randomNumber, id FROM world WHERE id=$1"), {id}, [c, async] (AResult &result) { - if (Q_UNLIKELY(result.error() && !result.size())) { + if (Q_UNLIKELY(result.error() || !result.size())) { c->res()->setStatus(Response::InternalServerError); return; } @@ -99,7 +101,7 @@ void DatabaseUpdatesTest::updateb(Context *c) const APreparedQuery pq = getSql(queries); db.execPrepared(pq, args, [c, async] (AResult &result) { - if (Q_UNLIKELY(result.error() && !result.size())) { + if (Q_UNLIKELY(result.error())) { c->res()->setStatus(Response::InternalServerError); return; } @@ -189,9 +191,13 @@ APreparedQuery DatabaseUpdatesTest::getSql(int count) sql.append(QStringLiteral("ELSE randomnumber END WHERE id IN (")); for (int i = 0; i < count; i++) { - sql.append(QLatin1Char('$') + QString::number(placeholdersCounter)); + sql.append(QLatin1Char('$') + QString::number(placeholdersCounter) + QLatin1Char(',')); ++placeholdersCounter; } + + if (count) { + sql.remove(sql.size() - 1, 1); + } sql.append(QLatin1Char(')')); m_sqlMap.insert(count, sql); diff --git a/frameworks/C++/lithium/benchmark_config.json b/frameworks/C++/lithium/benchmark_config.json index 1ff65ba7363..78ccbe25ac2 100755 --- a/frameworks/C++/lithium/benchmark_config.json +++ b/frameworks/C++/lithium/benchmark_config.json @@ -94,6 +94,28 @@ "display_name": "Lithium-postgres-batch", "notes": "", "versus": "None" + }, + + "postgres-batch-beta": { + "db_url" : "/db", + "query_url" : "/queries?N=", + "fortune_url" : "/fortunes", + "update_url" : "/updates?N=", + "port": 8080, + "approach": "Realistic", + "classification": "Micro", + "database": "Postgres", + "framework": "Lithium", + "language": "C++", + "flavor": "None", + "orm": "Full", + "platform": "None", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "Lithium-postgres-batch-beta", + "notes": "", + "versus": "None" } } diff --git 
a/frameworks/C++/lithium/compile-batch.sh b/frameworks/C++/lithium/compile-batch.sh index 41f0f4c1032..fc1b5388a9d 100755 --- a/frameworks/C++/lithium/compile-batch.sh +++ b/frameworks/C++/lithium/compile-batch.sh @@ -2,7 +2,11 @@ DB_FLAG=$1 COMMIT=10b72ad8ce16528ff642f802e266a975964d06f9 -MONOTHREAD=$2 +COMMIT="$2" +MONOTHREAD=$2 +if [ "$COMMIT" = "" ]; then + COMMIT="10b72ad8ce16528ff642f802e266a975964d06f9" +fi if [ $DB_FLAG = "TFB_MYSQL" ]; then echo "ERROR: Only Postgres has pipelining support for now." @@ -17,9 +21,9 @@ fi wget https://raw.githubusercontent.com/matt-42/lithium/$COMMIT/single_headers/lithium_http_backend.hh -clang++ -fprofile-instr-generate=./profile.prof -flto -DLITHIUM_SERVER_NAME=l -DPROFILE_MODE -DN_SQL_CONNECTIONS=1 -DMONOTHREAD=$MONOTHREAD -DNDEBUG -D$DB_FLAG -O3 -march=native -std=c++17 ./lithium_batch.cc $CXX_FLAGS -lpthread -lboost_context -lssl -lcrypto -o /lithium_tbf +clang++ -fprofile-instr-generate=./profile.prof -flto -DLITHIUM_SERVER_NAME=l -DPROFILE_MODE -DN_SQL_CONNECTIONS=1 -DNDEBUG -D$DB_FLAG -O3 -march=native -std=c++17 ./lithium_batch.cc $CXX_FLAGS -lpthread -lboost_context -lssl -lcrypto -o /lithium_tbf /lithium_tbf tfb-database 8081 llvm-profdata-10 merge -output=./profile.pgo ./profile.prof -clang++ -fprofile-instr-use=./profile.pgo -flto -DLITHIUM_SERVER_NAME=l -DNDEBUG -D$DB_FLAG -DN_SQL_CONNECTIONS=1 -DMONOTHREAD=$MONOTHREAD -O3 -march=native -std=c++17 ./lithium_batch.cc $CXX_FLAGS -lpthread -lboost_context -lssl -lcrypto -o /lithium_tbf +clang++ -fprofile-instr-use=./profile.pgo -flto -DLITHIUM_SERVER_NAME=l -DNDEBUG -D$DB_FLAG -DN_SQL_CONNECTIONS=1 -O3 -march=native -std=c++17 ./lithium_batch.cc $CXX_FLAGS -lpthread -lboost_context -lssl -lcrypto -o /lithium_tbf /lithium_tbf tfb-database 8080 diff --git a/frameworks/C++/lithium/compile.sh b/frameworks/C++/lithium/compile.sh index 94a6fa6097e..7f216ed4475 100755 --- a/frameworks/C++/lithium/compile.sh +++ b/frameworks/C++/lithium/compile.sh @@ -4,7 +4,7 @@ DB_FLAG=$1 COMMIT="$2" if [ "$COMMIT" = "" ]; then - COMMIT="e199537f15abce7cf25c0b4de0f14461a1f426ec" + COMMIT="a046b3345098157849d9e2ab49a475aaabf4a90f" fi if [ $DB_FLAG = "TFB_MYSQL" ]; then diff --git a/frameworks/C++/lithium/config.toml b/frameworks/C++/lithium/config.toml index 22daac9050e..ac4e73f100c 100644 --- a/frameworks/C++/lithium/config.toml +++ b/frameworks/C++/lithium/config.toml @@ -1,5 +1,7 @@ [framework] name = "lithium" +authors = ["Matthieu Garrigues "] +github = "https://github.com/matt-42/lithium" [main] urls.plaintext = "/plaintext" @@ -69,3 +71,18 @@ orm = "Full" platform = "None" webserver = "None" versus = "None" + +[postgres-batch-beta] +urls.db = "/db" +urls.query = "/queries?N=" +urls.update = "/updates?N=" +urls.fortune = "/fortunes" +approach = "Realistic" +classification = "Micro" +database = "Postgres" +database_os = "Linux" +os = "Linux" +orm = "Full" +platform = "None" +webserver = "None" +versus = "None" diff --git a/frameworks/C++/lithium/lithium-postgres-batch-beta.dockerfile b/frameworks/C++/lithium/lithium-postgres-batch-beta.dockerfile new file mode 100644 index 00000000000..a3ff0b916d2 --- /dev/null +++ b/frameworks/C++/lithium/lithium-postgres-batch-beta.dockerfile @@ -0,0 +1,13 @@ + +FROM buildpack-deps:focal + +RUN apt-get update -yqq +RUN apt-get install -yqq clang libboost-context-dev libboost-dev wget +RUN apt-get install -yqq bison flex + +COPY ./ ./ + +RUN ./compile_libpq.sh batchmode +ENV LD_LIBRARY_PATH=/usr/lib + +CMD ./compile-batch.sh TFB_PGSQL 
16b3f43a2b292e5349b7b528fa9410b3239c15cb diff --git a/frameworks/C++/lithium/lithium-postgres-batch.dockerfile b/frameworks/C++/lithium/lithium-postgres-batch.dockerfile index fefaca93b9e..61130fbb328 100644 --- a/frameworks/C++/lithium/lithium-postgres-batch.dockerfile +++ b/frameworks/C++/lithium/lithium-postgres-batch.dockerfile @@ -11,4 +11,4 @@ ENV LD_LIBRARY_PATH=/usr/lib EXPOSE 8080 -CMD ./compile-batch.sh TFB_PGSQL 0 +CMD ./compile-batch.sh TFB_PGSQL \ No newline at end of file diff --git a/frameworks/C++/lithium/lithium-postgres-beta.dockerfile b/frameworks/C++/lithium/lithium-postgres-beta.dockerfile index 29554dc220f..c24faf3b672 100644 --- a/frameworks/C++/lithium/lithium-postgres-beta.dockerfile +++ b/frameworks/C++/lithium/lithium-postgres-beta.dockerfile @@ -11,4 +11,4 @@ ENV LD_LIBRARY_PATH=/usr/lib EXPOSE 8080 -CMD ./compile.sh TFB_PGSQL 667f439040821d981923492e59749b87005268e4 +CMD ./compile.sh TFB_PGSQL a046b3345098157849d9e2ab49a475aaabf4a90f diff --git a/frameworks/C/lwan/Makefile b/frameworks/C/lwan/Makefile index 37a94f59e4a..f39433a82fc 100644 --- a/frameworks/C/lwan/Makefile +++ b/frameworks/C/lwan/Makefile @@ -6,8 +6,7 @@ CFLAGS = -mtune=native -march=native -O3 -fno-plt -flto -ffat-lto-objects -DNDEB -I /lwan/src/lib \ `pkg-config mariadb --cflags` \ `pkg-config sqlite3 --cflags` \ - `pkg-config luajit --cflags` \ - -fauto-profile=/lwan/src/gcda/techempower.gcov + `pkg-config luajit --cflags` LDFLAGS = -mtune=native -march=native -O3 -flto -ffat-lto-objects -Wl,-z,now,-z,relro \ -Wl,-whole-archive /lwan/build/src/lib/liblwan.a -Wl,-no-whole-archive \ diff --git a/frameworks/C/lwan/lwan-lua.dockerfile b/frameworks/C/lwan/lwan-lua.dockerfile index 180bba6d490..84d8f880243 100644 --- a/frameworks/C/lwan/lwan-lua.dockerfile +++ b/frameworks/C/lwan/lwan-lua.dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:19.10 +FROM ubuntu:20.10 RUN apt-get update -yqq && \ apt-get install -yqq \ @@ -18,7 +18,7 @@ RUN mkdir luajit && \ cd luajit && \ PREFIX=/usr CFLAGS="-O3 -mtune=native -march=native -flto -ffat-lto-objects" make -j install -RUN wget https://github.com/lpereira/lwan/archive/4068da5ce808c279fe1921daa52bfd728229a434.tar.gz -O - | tar xz --strip-components=1 && \ +RUN wget https://github.com/lpereira/lwan/archive/3bb25e235e76028d7687ae4636a059474f42b19c.tar.gz -O - | tar xz --strip-components=1 && \ mkdir build && cd build && \ cmake /lwan -DCMAKE_BUILD_TYPE=Release -DUSE_ALTERNATIVE_MALLOC=mimalloc && \ make lwan-static diff --git a/frameworks/C/lwan/lwan.dockerfile b/frameworks/C/lwan/lwan.dockerfile index 6bef83b801b..cb52ac89e1e 100644 --- a/frameworks/C/lwan/lwan.dockerfile +++ b/frameworks/C/lwan/lwan.dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:19.10 +FROM ubuntu:20.10 RUN apt-get update -yqq && \ apt-get install -yqq \ @@ -18,7 +18,7 @@ RUN mkdir luajit && \ cd luajit && \ PREFIX=/usr CFLAGS="-O3 -mtune=native -march=native -flto -ffat-lto-objects" make -j install -RUN wget https://github.com/lpereira/lwan/archive/4068da5ce808c279fe1921daa52bfd728229a434.tar.gz -O - | tar xz --strip-components=1 && \ +RUN wget https://github.com/lpereira/lwan/archive/3bb25e235e76028d7687ae4636a059474f42b19c.tar.gz -O - | tar xz --strip-components=1 && \ mkdir build && cd build && \ cmake /lwan -DCMAKE_BUILD_TYPE=Release -DUSE_ALTERNATIVE_MALLOC=mimalloc && \ make lwan-static diff --git a/frameworks/CSharp/beetlex/PlatformBenchmarks/HttpHandler.cs b/frameworks/CSharp/beetlex/PlatformBenchmarks/HttpHandler.cs index 2789b9de349..247cd75b74c 100644 --- 
a/frameworks/CSharp/beetlex/PlatformBenchmarks/HttpHandler.cs +++ b/frameworks/CSharp/beetlex/PlatformBenchmarks/HttpHandler.cs @@ -312,7 +312,7 @@ private void OnCompleted(PipeStream stream, ISession session, HttpToken token) { token.FullLength((stream.CacheLength - token.ContentPostion).ToString()); - if (token.Requests.IsEmpty) + if (token.Requests.IsEmpty && stream.Length == 0) session.Stream.Flush(); } diff --git a/frameworks/CSharp/beetlex/PlatformBenchmarks/HttpServer.cs b/frameworks/CSharp/beetlex/PlatformBenchmarks/HttpServer.cs index 17d5a7b7648..ce08cbb0c8a 100644 --- a/frameworks/CSharp/beetlex/PlatformBenchmarks/HttpServer.cs +++ b/frameworks/CSharp/beetlex/PlatformBenchmarks/HttpServer.cs @@ -20,9 +20,16 @@ public virtual Task StartAsync(CancellationToken cancellationToken) serverOptions.LogLevel = LogType.Error; serverOptions.DefaultListen.Port = 8080; serverOptions.Statistical = false; - serverOptions.BufferSize = 1024 * 8; serverOptions.BufferPoolMaxMemory = 1000; serverOptions.BufferPoolSize = 1024 * 10; + if (Program.Debug) + { + serverOptions.BufferSize = 1024 * 16; + } + else + { + serverOptions.BufferSize = 1024 * 8; + } ApiServer = SocketFactory.CreateTcpServer(serverOptions); ApiServer.Open(); if (!Program.UpDB) diff --git a/frameworks/CSharp/beetlex/PlatformBenchmarks/fortunes.cs b/frameworks/CSharp/beetlex/PlatformBenchmarks/fortunes.cs index 4cbbc9126aa..1abd6e06489 100644 --- a/frameworks/CSharp/beetlex/PlatformBenchmarks/fortunes.cs +++ b/frameworks/CSharp/beetlex/PlatformBenchmarks/fortunes.cs @@ -4,6 +4,8 @@ using System.Collections.Generic; using System.Globalization; using System.Text; +using System.Text.Encodings.Web; +using System.Text.Unicode; using System.Threading.Tasks; namespace PlatformBenchmarks @@ -17,10 +19,23 @@ public partial class HttpHandler private readonly static AsciiString _fortunesRowEnd = ""; private readonly static AsciiString _fortunesTableEnd = ""; + [ThreadStatic] + private static char[] mHtmlEncodeBuffer; + + protected HtmlEncoder HtmlEncoder { get; } = CreateHtmlEncoder(); + + private static HtmlEncoder CreateHtmlEncoder() + { + var settings = new TextEncoderSettings(UnicodeRanges.BasicLatin, UnicodeRanges.Katakana, UnicodeRanges.Hiragana); + settings.AllowCharacter('\u2014'); // allow EM DASH through + return HtmlEncoder.Create(settings); + } + public async Task fortunes(PipeStream stream, HttpToken token, ISession session) { try { + var data = await token.Db.LoadFortunesRows(); stream.Write(_fortunesTableStart.Data, 0, _fortunesTableStart.Length); foreach (var item in data) @@ -28,7 +43,11 @@ public async Task fortunes(PipeStream stream, HttpToken token, ISession session) stream.Write(_fortunesRowStart.Data, 0, _fortunesRowStart.Length); stream.Write(item.Id.ToString(CultureInfo.InvariantCulture)); stream.Write(_fortunesColumn.Data, 0, _fortunesColumn.Length); - stream.Write(System.Web.HttpUtility.HtmlEncode(item.Message)); + if (mHtmlEncodeBuffer == null) + mHtmlEncodeBuffer = new char[1024]; + HtmlEncoder.Encode(item.Message, mHtmlEncodeBuffer, out int consumed, out int writtens); + //stream.Write(HtmlEncoder.Encode(item.Message)); + stream.Write(new ArraySegment(mHtmlEncodeBuffer, 0, writtens)); stream.Write(_fortunesRowEnd.Data, 0, _fortunesRowEnd.Length); } stream.Write(_fortunesTableEnd.Data, 0, _fortunesTableEnd.Length); diff --git a/frameworks/CSharp/genhttp/Benchmarks/Benchmarks.csproj b/frameworks/CSharp/genhttp/Benchmarks/Benchmarks.csproj index 8ee352f981c..77a621988a7 100644 --- 
a/frameworks/CSharp/genhttp/Benchmarks/Benchmarks.csproj +++ b/frameworks/CSharp/genhttp/Benchmarks/Benchmarks.csproj @@ -26,10 +26,10 @@ - - - - + + + + diff --git a/frameworks/CSharp/genhttp/Benchmarks/Program.cs b/frameworks/CSharp/genhttp/Benchmarks/Program.cs index 84cf2684064..efd8a162ac9 100644 --- a/frameworks/CSharp/genhttp/Benchmarks/Program.cs +++ b/frameworks/CSharp/genhttp/Benchmarks/Program.cs @@ -5,6 +5,7 @@ using GenHTTP.Modules.Webservices; using Benchmarks.Tests; +using Benchmarks.Utilities; namespace Benchmarks { @@ -21,7 +22,8 @@ public static int Main(string[] args) .AddService("db") .AddService("queries") .AddService("updates") - .AddService("cached-worlds"); + .AddService("cached-worlds") + .Add(ServerHeader.Create()); return Host.Create() .Handler(tests) diff --git a/frameworks/CSharp/genhttp/Benchmarks/Tests/FortuneHandler.cs b/frameworks/CSharp/genhttp/Benchmarks/Tests/FortuneHandler.cs index a7fb5bbeee8..a397d68c9ae 100644 --- a/frameworks/CSharp/genhttp/Benchmarks/Tests/FortuneHandler.cs +++ b/frameworks/CSharp/genhttp/Benchmarks/Tests/FortuneHandler.cs @@ -49,6 +49,7 @@ public FortuneModel(IRequest request, IHandler handler, List cookies) : public class FortuneHandler : IHandler, IPageRenderer { + private static readonly FlexibleContentType CONTENT_TYPE = new FlexibleContentType("text/html; charset=utf-8"); #region Get-/Setters @@ -87,7 +88,7 @@ public async ValueTask RenderAsync(TemplateModel model) { return model.Request.Respond() .Content(await Template.RenderAsync(model)) - .Header("Content-Type", "text/html; charset=utf-8"); + .Type(CONTENT_TYPE); } private async ValueTask GetFortunes(IRequest request, IHandler handler) diff --git a/frameworks/CSharp/genhttp/Benchmarks/Utilities/ServerHeader.cs b/frameworks/CSharp/genhttp/Benchmarks/Utilities/ServerHeader.cs new file mode 100644 index 00000000000..7fff879379e --- /dev/null +++ b/frameworks/CSharp/genhttp/Benchmarks/Utilities/ServerHeader.cs @@ -0,0 +1,11 @@ +namespace Benchmarks.Utilities +{ + + public static class ServerHeader + { + + public static ServerHeaderConcernBuilder Create() => new ServerHeaderConcernBuilder(); + + } + +} diff --git a/frameworks/CSharp/genhttp/Benchmarks/Utilities/ServerHeaderConcern.cs b/frameworks/CSharp/genhttp/Benchmarks/Utilities/ServerHeaderConcern.cs new file mode 100644 index 00000000000..d4020d4fd7e --- /dev/null +++ b/frameworks/CSharp/genhttp/Benchmarks/Utilities/ServerHeaderConcern.cs @@ -0,0 +1,54 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; + +using GenHTTP.Api.Content; +using GenHTTP.Api.Protocol; + +namespace Benchmarks.Utilities +{ + + public sealed class ServerHeaderConcern : IConcern + { + + #region Get-/Setters + + public IHandler Content { get; } + + public IHandler Parent { get; } + + #endregion + + #region Initialization + + public ServerHeaderConcern(IHandler parent, Func contentFactory) + { + Parent = parent; + Content = contentFactory(this); + } + + #endregion + + #region Functionality + + public IEnumerable GetContent(IRequest request) => Content.GetContent(request); + + public ValueTask PrepareAsync() => Content.PrepareAsync(); + + public async ValueTask HandleAsync(IRequest request) + { + var response = await Content.HandleAsync(request); + + if (response != null) + { + response.Headers.Add("Server", "TFB"); + } + + return response; + } + + #endregion + + } + +} diff --git a/frameworks/CSharp/genhttp/Benchmarks/Utilities/ServerHeaderConcernBuilder.cs 
b/frameworks/CSharp/genhttp/Benchmarks/Utilities/ServerHeaderConcernBuilder.cs new file mode 100644 index 00000000000..b935ac66553 --- /dev/null +++ b/frameworks/CSharp/genhttp/Benchmarks/Utilities/ServerHeaderConcernBuilder.cs @@ -0,0 +1,18 @@ +using System; + +using GenHTTP.Api.Content; + +namespace Benchmarks.Utilities +{ + + public sealed class ServerHeaderConcernBuilder : IConcernBuilder + { + + public IConcern Build(IHandler parent, Func contentFactory) + { + return new ServerHeaderConcern(parent, contentFactory); + } + + } + +} diff --git a/frameworks/CSharp/netcoreserver/.gitignore b/frameworks/CSharp/netcoreserver/.gitignore new file mode 100644 index 00000000000..c8d1eed0116 --- /dev/null +++ b/frameworks/CSharp/netcoreserver/.gitignore @@ -0,0 +1,38 @@ +[Oo]bj/ +[Bb]in/ +TestResults/ +.nuget/ +*.sln +*.sln.ide/ +_ReSharper.*/ +.idea/ +packages/ +artifacts/ +PublishProfiles/ +.vs/ +*.user +*.suo +*.cache +*.docstates +_ReSharper.* +nuget.exe +*net45.csproj +*net451.csproj +*k10.csproj +*.psess +*.vsp +*.pidb +*.userprefs +*DS_Store +*.ncrunchsolution +*.*sdf +*.ipch +*.swp +*~ +.build/ +.testPublish/ +launchSettings.json +BenchmarkDotNet.Artifacts/ +BDN.Generated/ +binaries/ +global.json diff --git a/frameworks/CSharp/netcoreserver/Benchmarks/Benchmarks.csproj b/frameworks/CSharp/netcoreserver/Benchmarks/Benchmarks.csproj new file mode 100644 index 00000000000..6d372b01ee3 --- /dev/null +++ b/frameworks/CSharp/netcoreserver/Benchmarks/Benchmarks.csproj @@ -0,0 +1,24 @@ + + + + + net5.0 + 9.0 + + EmbedIO Benchmarks + Test suite to be executed with TechEmpower FrameworkBenchmarks. + + Benchmarks.Program + Exe + + true + false + + + + + + + + + \ No newline at end of file diff --git a/frameworks/CSharp/netcoreserver/Benchmarks/HttpBenchmarkServer.cs b/frameworks/CSharp/netcoreserver/Benchmarks/HttpBenchmarkServer.cs new file mode 100644 index 00000000000..fcbd209e2db --- /dev/null +++ b/frameworks/CSharp/netcoreserver/Benchmarks/HttpBenchmarkServer.cs @@ -0,0 +1,24 @@ +using System; +using System.Net; +using System.Net.Sockets; + +using NetCoreServer; + +namespace Benchmarks +{ + + public class HttpBenchmarkServer : HttpServer + { + + public HttpBenchmarkServer(IPAddress address, int port) : base(address, port) { } + + protected override TcpSession CreateSession() { return new HttpBenchmarkSession(this); } + + protected override void OnError(SocketError error) + { + Console.WriteLine($"HTTP session caught an error: {error}"); + } + + } + +} diff --git a/frameworks/CSharp/netcoreserver/Benchmarks/HttpBenchmarkSession.cs b/frameworks/CSharp/netcoreserver/Benchmarks/HttpBenchmarkSession.cs new file mode 100644 index 00000000000..25e58bd5aa1 --- /dev/null +++ b/frameworks/CSharp/netcoreserver/Benchmarks/HttpBenchmarkSession.cs @@ -0,0 +1,59 @@ +using System; +using System.Net.Sockets; +using System.Text.Json; + +using NetCoreServer; + +namespace Benchmarks +{ + + public class HttpBenchmarkSession : HttpSession + { + + public HttpBenchmarkSession(HttpServer server) : base(server) { } + + protected override void OnReceivedRequest(HttpRequest request) + { + if (request.Url.StartsWith("/plaintext")) + { + SendResponseAsync(MakeResponse("Hello, World!", "text/plain; charset=UTF-8")); + } + else if (request.Url.StartsWith("/json")) + { + var value = new JsonResult() { Message = "Hello, World!" 
}; + var serialized = JsonSerializer.Serialize(value); + + SendResponseAsync(MakeResponse(serialized, "application/json; charset=UTF-8")); + } + else + { + SendResponseAsync(Response.MakeErrorResponse("Not found", 404)); + } + } + + protected override void OnReceivedRequestError(HttpRequest request, string error) + { + Console.WriteLine($"Request error: {error}"); + } + + protected override void OnError(SocketError error) + { + Console.WriteLine($"HTTP session caught an error: {error}"); + } + + private static HttpResponse MakeResponse(string value, string contentType) + { + var response = new HttpResponse(200, "OK", "HTTP/1.1"); + + response.SetHeader("Server", "NetCoreServer"); + response.SetHeader("Date", DateTime.UtcNow.ToString("r")); + response.SetHeader("Content-Type", contentType); + + response.SetBody(value); + + return response; + } + + } + +} diff --git a/frameworks/CSharp/netcoreserver/Benchmarks/JsonResult.cs b/frameworks/CSharp/netcoreserver/Benchmarks/JsonResult.cs new file mode 100644 index 00000000000..d65ff646f32 --- /dev/null +++ b/frameworks/CSharp/netcoreserver/Benchmarks/JsonResult.cs @@ -0,0 +1,11 @@ +namespace Benchmarks +{ + + public sealed class JsonResult + { + + public string Message { get; set; } + + } + +} diff --git a/frameworks/CSharp/netcoreserver/Benchmarks/Program.cs b/frameworks/CSharp/netcoreserver/Benchmarks/Program.cs new file mode 100644 index 00000000000..411569ffb2f --- /dev/null +++ b/frameworks/CSharp/netcoreserver/Benchmarks/Program.cs @@ -0,0 +1,44 @@ +using System; +using System.Net; +using System.Threading; + +namespace Benchmarks +{ + + public static class Program + { + + private static readonly ManualResetEvent _WaitEvent = new ManualResetEvent(false); + + public static int Main(string[] args) + { + var server = new HttpBenchmarkServer(IPAddress.Any, 8080); + + try + { + AppDomain.CurrentDomain.ProcessExit += (_, __) => + { + _WaitEvent.Set(); + }; + + server.Start(); + + _WaitEvent.WaitOne(); + + return 0; + } + catch (Exception e) + { + Console.WriteLine(e); + + return -1; + } + finally + { + server.Stop(); + } + } + + } + +} diff --git a/frameworks/CSharp/netcoreserver/README.md b/frameworks/CSharp/netcoreserver/README.md new file mode 100644 index 00000000000..985758c2bc9 --- /dev/null +++ b/frameworks/CSharp/netcoreserver/README.md @@ -0,0 +1,22 @@ +# NetCoreServer Tests on Linux + +See the [project website](https://github.com/chronoxor/NetCoreServer) for more information. 
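For reviewers who want to smoke-test this new framework entry outside the TFB toolset, a rough sketch follows. It assumes Docker is available, that it is run from the frameworks/CSharp/netcoreserver directory, and that the image tag netcoreserver-local is only a placeholder; the endpoints and port come from the benchmark_config.json added below.

```bash
# Build the image from the dockerfile added in this PR
docker build -t netcoreserver-local -f netcoreserver.dockerfile .

# Run it and give the server a moment to start
docker run --rm -d -p 8080:8080 --name netcoreserver-local netcoreserver-local
sleep 2

# Hit the two implemented endpoints
curl -i http://localhost:8080/plaintext   # "Hello, World!" as text/plain
curl -i http://localhost:8080/json        # a small JSON "Hello, World!" payload

docker stop netcoreserver-local
```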
+ +## Infrastructure Software Versions + +**Language** + +* C# 9.0 + +**Platforms** + +* .NET 5 + +**Web Servers** + +* [NetCoreServer](https://github.com/chronoxor/NetCoreServer) + +## Paths & Source for Tests + +* [Plaintext](Benchmarks/HttpBenchmarkSession.cs): "/plaintext" +* [JSON](Benchmarks/HttpBenchmarkSession.cs): "/json" diff --git a/frameworks/CSharp/netcoreserver/benchmark_config.json b/frameworks/CSharp/netcoreserver/benchmark_config.json new file mode 100644 index 00000000000..85f95b0dee6 --- /dev/null +++ b/frameworks/CSharp/netcoreserver/benchmark_config.json @@ -0,0 +1,22 @@ +{ + "framework": "netcoreserver", + "tests": [{ + "default": { + "plaintext_url": "/plaintext", + "json_url": "/json", + "port": 8080, + "approach": "Realistic", + "classification": "Fullstack", + "database": "None", + "framework": "NetCoreServer", + "language": "C#", + "orm": "Raw", + "platform": ".NET", + "webserver": "NetCoreServer", + "os": "Linux", + "database_os": "Linux", + "display_name": "NetCoreServer", + "notes": "" + } + }] +} diff --git a/frameworks/CSharp/netcoreserver/config.toml b/frameworks/CSharp/netcoreserver/config.toml new file mode 100644 index 00000000000..43b2cd1518c --- /dev/null +++ b/frameworks/CSharp/netcoreserver/config.toml @@ -0,0 +1,15 @@ +[framework] +name = "netcoreserver" + +[main] +urls.plaintext = "/plaintext" +urls.json = "/json" +approach = "Realistic" +classification = "Fullstack" +database = "None" +database_os = "Linux" +os = "Linux" +orm = "Raw" +platform = ".NET" +webserver = "NetCoreServer" +versus = "None" diff --git a/frameworks/CSharp/netcoreserver/netcoreserver.dockerfile b/frameworks/CSharp/netcoreserver/netcoreserver.dockerfile new file mode 100644 index 00000000000..675c16d8841 --- /dev/null +++ b/frameworks/CSharp/netcoreserver/netcoreserver.dockerfile @@ -0,0 +1,19 @@ +FROM mcr.microsoft.com/dotnet/sdk:5.0-alpine AS build +WORKDIR /source + +# copy csproj and restore as distinct layers +COPY Benchmarks/*.csproj . +RUN dotnet restore -r linux-musl-x64 + +# copy and publish app and libraries +COPY Benchmarks/ . +RUN dotnet publish -c release -o /app -r linux-musl-x64 + +# final stage/image +FROM mcr.microsoft.com/dotnet/runtime-deps:5.0-alpine +WORKDIR /app +COPY --from=build /app . 
+ +ENTRYPOINT ["./Benchmarks"] + +EXPOSE 8080 \ No newline at end of file diff --git a/frameworks/CSharp/servicestack/src/SelfHost/packages.config b/frameworks/CSharp/servicestack/src/SelfHost/packages.config index 54eeb719658..064e4480bfb 100644 --- a/frameworks/CSharp/servicestack/src/SelfHost/packages.config +++ b/frameworks/CSharp/servicestack/src/SelfHost/packages.config @@ -1,6 +1,6 @@  - + diff --git a/frameworks/CSharp/watson/.gitignore b/frameworks/CSharp/watson/.gitignore new file mode 100644 index 00000000000..c8d1eed0116 --- /dev/null +++ b/frameworks/CSharp/watson/.gitignore @@ -0,0 +1,38 @@ +[Oo]bj/ +[Bb]in/ +TestResults/ +.nuget/ +*.sln +*.sln.ide/ +_ReSharper.*/ +.idea/ +packages/ +artifacts/ +PublishProfiles/ +.vs/ +*.user +*.suo +*.cache +*.docstates +_ReSharper.* +nuget.exe +*net45.csproj +*net451.csproj +*k10.csproj +*.psess +*.vsp +*.pidb +*.userprefs +*DS_Store +*.ncrunchsolution +*.*sdf +*.ipch +*.swp +*~ +.build/ +.testPublish/ +launchSettings.json +BenchmarkDotNet.Artifacts/ +BDN.Generated/ +binaries/ +global.json diff --git a/frameworks/CSharp/watson/Benchmarks/Benchmarks.csproj b/frameworks/CSharp/watson/Benchmarks/Benchmarks.csproj new file mode 100644 index 00000000000..f199ceb3b59 --- /dev/null +++ b/frameworks/CSharp/watson/Benchmarks/Benchmarks.csproj @@ -0,0 +1,24 @@ + + + + + net5.0 + 9.0 + + EmbedIO Benchmarks + Test suite to be executed with TechEmpower FrameworkBenchmarks. + + Benchmarks.Program + Exe + + true + false + + + + + + + + + \ No newline at end of file diff --git a/frameworks/CSharp/watson/Benchmarks/Program.cs b/frameworks/CSharp/watson/Benchmarks/Program.cs new file mode 100644 index 00000000000..99e4d5922e4 --- /dev/null +++ b/frameworks/CSharp/watson/Benchmarks/Program.cs @@ -0,0 +1,89 @@ +using System; +using System.Linq; +using System.Net; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; + +using WatsonWebserver; + +namespace Benchmarks +{ + + #region Supporting data structures + + public class JsonResult + { + + public string Message { get; set; } + + } + + #endregion + + public static class Program + { + private static readonly ManualResetEvent _WaitEvent = new ManualResetEvent(false); + + public static async Task Main(string[] args) + { +#if DEBUG + var host = "127.0.0.1"; +#else + var host = "tfb-server"; +#endif + + using var server = new Server(host, 8080, false, DefaultRoute); + + server.Routes.Static.Add(HttpMethod.GET, "/plaintext", PlaintextRoute); + server.Routes.Static.Add(HttpMethod.GET, "/json", JsonRoute); + + try + { + AppDomain.CurrentDomain.ProcessExit += (_, __) => + { + _WaitEvent.Set(); + }; + + await server.StartAsync(); + + _WaitEvent.WaitOne(); + + return 0; + } + catch (Exception e) + { + Console.WriteLine(e); + + return -1; + } + } + + static async Task DefaultRoute(HttpContext ctx) + { + ctx.Response.StatusCode = 404; + ctx.Response.StatusDescription = "Not Found"; + + await ctx.Response.Send("Not found."); + } + + static async Task PlaintextRoute(HttpContext ctx) + { + ctx.Response.Headers.Add("Content-Type", "text/plain; charset=UTF-8"); + + await ctx.Response.Send("Hello, World!"); + } + + static async Task JsonRoute(HttpContext ctx) + { + var response = new JsonResult() { Message = "Hello, World!" 
}; + var serialized = JsonSerializer.Serialize(response); + + ctx.Response.Headers.Add("Content-Type", "application/json; charset=UTF-8"); + + await ctx.Response.Send(serialized); + } + + } + +} diff --git a/frameworks/CSharp/watson/README.md b/frameworks/CSharp/watson/README.md new file mode 100644 index 00000000000..549fa94b793 --- /dev/null +++ b/frameworks/CSharp/watson/README.md @@ -0,0 +1,22 @@ +# Watson Webserver Tests on Linux + +See the [project website](https://github.com/jchristn/WatsonWebserver) for more information. + +## Infrastructure Software Versions + +**Language** + +* C# 9.0 + +**Platforms** + +* .NET 5 + +**Web Servers** + +* [Watson Webserver](https://github.com/jchristn/WatsonWebserver) + +## Paths & Source for Tests + +* [Plaintext](Benchmarks/Program.cs): "/plaintext" +* [JSON](Benchmarks/Program.cs): "/json" diff --git a/frameworks/CSharp/watson/benchmark_config.json b/frameworks/CSharp/watson/benchmark_config.json new file mode 100644 index 00000000000..6017f614566 --- /dev/null +++ b/frameworks/CSharp/watson/benchmark_config.json @@ -0,0 +1,22 @@ +{ + "framework": "watson", + "tests": [{ + "default": { + "plaintext_url": "/plaintext", + "json_url": "/json", + "port": 8080, + "approach": "Realistic", + "classification": "Fullstack", + "database": "None", + "framework": "Watson", + "language": "C#", + "orm": "Raw", + "platform": ".NET", + "webserver": "Watson", + "os": "Linux", + "database_os": "Linux", + "display_name": "Watson Webserver", + "notes": "" + } + }] +} diff --git a/frameworks/CSharp/watson/config.toml b/frameworks/CSharp/watson/config.toml new file mode 100644 index 00000000000..59bb70c0727 --- /dev/null +++ b/frameworks/CSharp/watson/config.toml @@ -0,0 +1,15 @@ +[framework] +name = "watson" + +[main] +urls.plaintext = "/plaintext" +urls.json = "/json" +approach = "Realistic" +classification = "Fullstack" +database = "None" +database_os = "Linux" +os = "Linux" +orm = "Raw" +platform = ".NET" +webserver = "Watson" +versus = "None" diff --git a/frameworks/CSharp/watson/watson.dockerfile b/frameworks/CSharp/watson/watson.dockerfile new file mode 100644 index 00000000000..675c16d8841 --- /dev/null +++ b/frameworks/CSharp/watson/watson.dockerfile @@ -0,0 +1,19 @@ +FROM mcr.microsoft.com/dotnet/sdk:5.0-alpine AS build +WORKDIR /source + +# copy csproj and restore as distinct layers +COPY Benchmarks/*.csproj . +RUN dotnet restore -r linux-musl-x64 + +# copy and publish app and libraries +COPY Benchmarks/ . +RUN dotnet publish -c release -o /app -r linux-musl-x64 + +# final stage/image +FROM mcr.microsoft.com/dotnet/runtime-deps:5.0-alpine +WORKDIR /app +COPY --from=build /app . + +ENTRYPOINT ["./Benchmarks"] + +EXPOSE 8080 \ No newline at end of file diff --git a/frameworks/Java/activej/README.md b/frameworks/Java/activej/README.md new file mode 100755 index 00000000000..fec94fb737a --- /dev/null +++ b/frameworks/Java/activej/README.md @@ -0,0 +1,28 @@ +# ActiveJ Benchmarking Test + +This is the ActiveJ portion of a [benchmarking test suite](../) comparing a variety of web development platforms. 
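To try the ActiveJ implementation locally without Docker, a minimal sketch is shown below; it mirrors the build and launch commands in the activej.dockerfile added in this PR and assumes Maven, a JDK 11, and a free port 8080 (the -XX:+AggressiveOpts flag used in the dockerfile is obsolete on newer JDKs and is omitted here).

```bash
# Build the fat jar exactly as the dockerfile does
mvn compile assembly:single -q

# Launch with the JVM options from the dockerfile (AggressiveOpts omitted, see note above)
java -Xms2G -Xmx2G -server -XX:+UseNUMA -XX:+UseParallelGC \
     -DHttpHeadersMultimap.initialSize=16 \
     -jar target/activej-server-benchmark-0.0.1-SNAPSHOT-jar-with-dependencies.jar &

# Exercise the two endpoints wired up in Main.java
curl -s http://localhost:8080/plaintext
curl -s http://localhost:8080/json
```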
+ +### Plaintext Test + +* [Plaintext test source](src/main/java/io/activej/http/benchmark/Main.java) + +### JSON Serialization Test + +* [JSON test source](src/main/java/io/activej/http/benchmark/Main.java) + + +## Versions + +* [Java OpenJDK 1.8](http://openjdk.java.net/) +* [ActiveJ 4.0-SNAPSHOT](http://activej.io/) +* [DSL-JSON 1.9.7](https://github.com/ngs-doo/dsl-json) + +## Test URLs + +### Plaintext Test + + http://localhost:8080/plaintext + +### JSON Encoding Test + + http://localhost:8080/json \ No newline at end of file diff --git a/frameworks/Java/activej/activej.dockerfile b/frameworks/Java/activej/activej.dockerfile new file mode 100644 index 00000000000..9a8c88c5071 --- /dev/null +++ b/frameworks/Java/activej/activej.dockerfile @@ -0,0 +1,11 @@ +FROM maven:3.6.1-jdk-11-slim as maven + +WORKDIR /activej +COPY pom.xml pom.xml +COPY src src +RUN mvn compile assembly:single -q + +FROM openjdk:11.0.3-jdk-slim +WORKDIR /activej +COPY --from=maven /activej/target/activej-server-benchmark-0.0.1-SNAPSHOT-jar-with-dependencies.jar app.jar +CMD ["java", "-Xms2G", "-Xmx2G", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-XX:+AggressiveOpts", "-DHttpHeadersMultimap.initialSize=16", "-jar", "app.jar"] \ No newline at end of file diff --git a/frameworks/Java/activej/benchmark_config.json b/frameworks/Java/activej/benchmark_config.json new file mode 100755 index 00000000000..a65f674598e --- /dev/null +++ b/frameworks/Java/activej/benchmark_config.json @@ -0,0 +1,23 @@ +{ + "framework": "activej", + "tests": [ + { + "default": { + "json_url": "/json", + "plaintext_url": "/plaintext", + "port": 8080, + "approach": "Realistic", + "classification": "Micro", + "framework": "activej", + "language": "Java", + "flavor": "None", + "platform": "activej", + "webserver": "None", + "os": "Linux", + "display_name": "activej", + "notes": "", + "versus": "None" + } + } + ] +} diff --git a/frameworks/Java/activej/config.toml b/frameworks/Java/activej/config.toml new file mode 100644 index 00000000000..6b6ac6b4969 --- /dev/null +++ b/frameworks/Java/activej/config.toml @@ -0,0 +1,15 @@ +[framework] +name = "activej" + +[main] +urls.plaintext = "/plaintext" +urls.json = "/json" +approach = "Realistic" +classification = "Micro" +database = "None" +database_os = "Linux" +os = "Linux" +orm = "None" +platform = "activej" +webserver = "None" +versus = "" diff --git a/frameworks/Java/activej/pom.xml b/frameworks/Java/activej/pom.xml new file mode 100755 index 00000000000..dd7e6047904 --- /dev/null +++ b/frameworks/Java/activej/pom.xml @@ -0,0 +1,58 @@ + + + 4.0.0 + + io.activej + activej-server-benchmark + 0.0.1-SNAPSHOT + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + maven-assembly-plugin + 3.1.0 + + + + io.activej.http.benchmark.Main + + + + jar-with-dependencies + + + + + make-assembly + package + + single + + + + + + + + + + io.activej + activej-launchers-http + 4.0-beta1 + + + com.dslplatform + dsl-json-java8 + 1.9.7 + + + + \ No newline at end of file diff --git a/frameworks/Java/activej/src/main/java/io/activej/http/benchmark/HelloWorldObject.java b/frameworks/Java/activej/src/main/java/io/activej/http/benchmark/HelloWorldObject.java new file mode 100644 index 00000000000..a0dfb498431 --- /dev/null +++ b/frameworks/Java/activej/src/main/java/io/activej/http/benchmark/HelloWorldObject.java @@ -0,0 +1,13 @@ +package io.activej.http.benchmark; + +public final class HelloWorldObject { + private final String message; + + public HelloWorldObject(String message) { + this.message = 
message; + } + + public String getMessage() { + return message; + } +} diff --git a/frameworks/Java/activej/src/main/java/io/activej/http/benchmark/Main.java b/frameworks/Java/activej/src/main/java/io/activej/http/benchmark/Main.java new file mode 100644 index 00000000000..4ce3560d5a2 --- /dev/null +++ b/frameworks/Java/activej/src/main/java/io/activej/http/benchmark/Main.java @@ -0,0 +1,128 @@ +package io.activej.http.benchmark; + +import com.dslplatform.json.DslJson; +import com.dslplatform.json.JsonWriter; +import com.dslplatform.json.runtime.Settings; +import io.activej.async.service.EventloopTaskScheduler; +import io.activej.bytebuf.ByteBuf; +import io.activej.config.Config; +import io.activej.eventloop.Eventloop; +import io.activej.http.*; +import io.activej.inject.annotation.Eager; +import io.activej.inject.annotation.Named; +import io.activej.inject.annotation.Provides; +import io.activej.inject.module.AbstractModule; +import io.activej.inject.module.Module; +import io.activej.launchers.http.MultithreadedHttpServerLauncher; +import io.activej.promise.Promise; +import io.activej.worker.annotation.Worker; + +import java.net.InetSocketAddress; +import java.time.Duration; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; +import java.util.concurrent.atomic.AtomicReference; + +import static io.activej.bytebuf.ByteBufStrings.encodeAscii; +import static io.activej.config.Config.ofClassPathProperties; +import static io.activej.config.Config.ofSystemProperties; +import static io.activej.config.converter.ConfigConverters.ofInetSocketAddress; +import static java.nio.charset.StandardCharsets.UTF_8; + +public final class Main extends MultithreadedHttpServerLauncher { + private static final HttpHeaderValue SERVER_HEADER_VALUE = HttpHeaderValue.ofBytes(encodeAscii("X")); + private static final HttpHeaderValue JSON_CONTENT_TYPE_HEADER_VALUE = HttpHeaderValue.ofBytes(encodeAscii("application/json")); + private static final HttpHeaderValue TEXT_CONTENT_TYPE_HEADER_VALUE = HttpHeaderValue.ofBytes(encodeAscii("text/plain")); + + private static final DateTimeFormatter FORMATTER = DateTimeFormatter.ofPattern("EEE, dd MMM yyyy HH:mm:ss z"); + private static final AtomicReference dateRef = new AtomicReference<>(HttpHeaderValue.ofBytes(encodeAscii(getServerTime()))); + + private static final DslJson DSL_JSON = new DslJson<>(Settings.basicSetup()); + private static final byte[] PLAINTEXT_BYTES = "Hello, World!".getBytes(UTF_8); + + @Provides + @Eager + EventloopTaskScheduler scheduler(Eventloop eventloop) { + return EventloopTaskScheduler.create(eventloop, + () -> { + dateRef.set(HttpHeaderValue.ofBytes(encodeAscii(getServerTime()))); + return Promise.complete(); + }) + .withInterval(Duration.ofSeconds(1)); + } + + @Provides + @Worker + AsyncServlet mainServlet(@Named("json") AsyncServlet jsonServlet, @Named("plaintext") AsyncServlet plaintextServlet) { + return request -> { + String path = request.getPath(); + if ("/json".equals(path)) { + return jsonServlet.serve(request); + } else if ("/plaintext".equals(path)) { + return plaintextServlet.serve(request); + } else { + return Promise.ofException(HttpError.ofCode(400)); + } + }; + } + + @Provides + @Worker + JsonWriter jsonWriter() { + return DSL_JSON.newWriter(); + } + + @Provides + @Worker + @Named("json") + AsyncServlet jsonServlet(JsonWriter writer) { + return request -> { + try { + writer.reset(); + DSL_JSON.serialize(writer, new HelloWorldObject("Hello, world!")); + } catch (Exception e) { + return 
Promise.ofException(HttpError.ofCode(400, "Failed to serialize JSON", e)); + } + return HttpResponse.ok200() + .withBody(ByteBuf.wrapForReading(writer.toByteArray())) + .withHeader(HttpHeaders.CONTENT_TYPE, JSON_CONTENT_TYPE_HEADER_VALUE) + .withHeader(HttpHeaders.SERVER, SERVER_HEADER_VALUE) + .withHeader(HttpHeaders.DATE, dateRef.get()); + }; + } + + @Provides + @Worker + @Named("plaintext") + AsyncServlet plaintextServlet() { + return request -> HttpResponse.ok200() + .withBody(ByteBuf.wrap(PLAINTEXT_BYTES, 0, 13)) + .withHeader(HttpHeaders.CONTENT_TYPE, TEXT_CONTENT_TYPE_HEADER_VALUE) + .withHeader(HttpHeaders.SERVER, SERVER_HEADER_VALUE) + .withHeader(HttpHeaders.DATE, dateRef.get()); + } + + static String getServerTime() { + return ZonedDateTime + .now() + .format(FORMATTER); + } + + @Override + protected Module getOverrideModule() { + return new AbstractModule() { + @Provides + Config config() { + return Config.create() + .with("http.listenAddresses", Config.ofValue(ofInetSocketAddress(), new InetSocketAddress(PORT))) + .with("workers", "" + Integer.toString(2 * Runtime.getRuntime().availableProcessors())) + .overrideWith(ofClassPathProperties(PROPERTIES_FILE, true)) + .overrideWith(ofSystemProperties("config")); + } + }; + } + + public static void main(String[] args) throws Exception { + new Main().launch(args); + } +} diff --git a/frameworks/Java/activej/src/main/resources/http-server.properties b/frameworks/Java/activej/src/main/resources/http-server.properties new file mode 100644 index 00000000000..aef982efb32 --- /dev/null +++ b/frameworks/Java/activej/src/main/resources/http-server.properties @@ -0,0 +1,2 @@ +eventloop.worker.idleInterval=10 millis +eventloop.primary.idleInterval=10 millis \ No newline at end of file diff --git a/frameworks/Java/officefloor/officefloor-async.dockerfile b/frameworks/Java/officefloor/officefloor-async.dockerfile index cc6e312c025..bc67a50fb55 100644 --- a/frameworks/Java/officefloor/officefloor-async.dockerfile +++ b/frameworks/Java/officefloor/officefloor-async.dockerfile @@ -7,7 +7,5 @@ RUN mvn -B clean package FROM openjdk:15 WORKDIR /officefloor COPY --from=maven /officefloor/src/woof_benchmark_async/target/woof_benchmark_async-1.0.0.jar server.jar - EXPOSE 8080 - -CMD ["java", "-server", "-Xms2g", "-Xmx2g", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Dhttp.port=8080", "-Dhttp.server.name=OF", "-Dhttp.date.header=true", "-jar", "server.jar"] +CMD ["java", "-server", "-Xms2g", "-Xmx2g", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Dhttp.port=8080", "-Dhttp.server.name=O", "-Dhttp.date.header=true", "-jar", "server.jar"] diff --git a/frameworks/Java/officefloor/officefloor-micro.dockerfile b/frameworks/Java/officefloor/officefloor-micro.dockerfile index be751972ddd..84f49f9ef55 100644 --- a/frameworks/Java/officefloor/officefloor-micro.dockerfile +++ b/frameworks/Java/officefloor/officefloor-micro.dockerfile @@ -7,7 +7,5 @@ RUN mvn -B clean package FROM openjdk:15 WORKDIR /officefloor COPY --from=maven /officefloor/src/woof_benchmark_micro/target/woof_benchmark_micro-1.0.0.jar server.jar - EXPOSE 8080 - -CMD ["java", "-server", "-Xms2g", "-Xmx2g", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Dhttp.port=8080", "-Dhttp.server.name=OF", "-Dhttp.date.header=true", "-jar", "server.jar"] +CMD ["java", "-server", "-Xms2g", "-Xmx2g", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Dhttp.port=8080", "-Dhttp.server.name=O", "-Dhttp.date.header=true", "-jar", "server.jar"] diff --git a/frameworks/Java/officefloor/officefloor-netty.dockerfile 
b/frameworks/Java/officefloor/officefloor-netty.dockerfile index 18ef89f97be..4153cd63315 100644 --- a/frameworks/Java/officefloor/officefloor-netty.dockerfile +++ b/frameworks/Java/officefloor/officefloor-netty.dockerfile @@ -11,7 +11,5 @@ RUN mvn -B clean package FROM openjdk:15 WORKDIR /officefloor COPY --from=maven /officefloor/src/woof_benchmark_netty/target/woof_benchmark_netty-1.0.0.jar server.jar - EXPOSE 8080 - -CMD ["java", "-server", "-Xms2g", "-Xmx2g", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Dhttp.port=8080", "-Dhttp.server.name=OF", "-Dhttp.date.header=true", "-jar", "server.jar"] +CMD ["java", "-server", "-Xms2g", "-Xmx2g", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Dhttp.port=8080", "-Dhttp.server.name=O", "-Dhttp.date.header=true", "-jar", "server.jar"] diff --git a/frameworks/Java/officefloor/officefloor-raw.dockerfile b/frameworks/Java/officefloor/officefloor-raw.dockerfile index 01fffa8e926..28ceaeded08 100644 --- a/frameworks/Java/officefloor/officefloor-raw.dockerfile +++ b/frameworks/Java/officefloor/officefloor-raw.dockerfile @@ -8,7 +8,5 @@ FROM openjdk:15-slim RUN apt-get update && apt-get install -y libjna-java WORKDIR /officefloor COPY --from=maven /officefloor/src/woof_benchmark_raw/target/woof_benchmark_raw-1.0.0.jar server.jar - EXPOSE 8080 - CMD ["java", "-Xms2g", "-Xmx2g", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-jar", "server.jar"] diff --git a/frameworks/Java/officefloor/officefloor-spring_data.dockerfile b/frameworks/Java/officefloor/officefloor-spring_data.dockerfile index ab9c3435c9f..def4f1922aa 100644 --- a/frameworks/Java/officefloor/officefloor-spring_data.dockerfile +++ b/frameworks/Java/officefloor/officefloor-spring_data.dockerfile @@ -7,7 +7,5 @@ RUN mvn -B clean package FROM openjdk:15 WORKDIR /officefloor COPY --from=maven /officefloor/src/woof_benchmark_spring/target/woof_benchmark_spring-1.0.0-exec.jar server.jar - EXPOSE 8080 - -CMD ["java", "-server", "-Xms2g", "-Xmx2g", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Dhttp.port=8080", "-Dhttp.server.name=OF", "-Dhttp.date.header=true", "-jar", "server.jar"] +CMD ["java", "-server", "-Xms2g", "-Xmx2g", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Dhttp.port=8080", "-Dhttp.server.name=O", "-Dhttp.date.header=true", "-jar", "server.jar"] diff --git a/frameworks/Java/officefloor/officefloor-thread_affinity.dockerfile b/frameworks/Java/officefloor/officefloor-thread_affinity.dockerfile index 89093a2f633..40096c95e1e 100644 --- a/frameworks/Java/officefloor/officefloor-thread_affinity.dockerfile +++ b/frameworks/Java/officefloor/officefloor-thread_affinity.dockerfile @@ -12,7 +12,5 @@ FROM openjdk:15-slim RUN apt-get update && apt-get install -y libjna-java WORKDIR /officefloor COPY --from=maven /officefloor/src/woof_benchmark_thread_affinity/target/woof_benchmark_thread_affinity-1.0.0.jar server.jar - EXPOSE 8080 - -CMD ["java", "-server", "-Xms2g", "-Xmx2g", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Dhttp.port=8080", "-Dhttp.server.name=OF", "-Dhttp.date.header=true", "-jar", "server.jar"] +CMD ["java", "-server", "-Xms2g", "-Xmx2g", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Dhttp.port=8080", "-Dhttp.server.name=O", "-Dhttp.date.header=true", "-jar", "server.jar"] diff --git a/frameworks/Java/officefloor/officefloor-undertow.dockerfile b/frameworks/Java/officefloor/officefloor-undertow.dockerfile index 202bd7b71ef..1dc6f6ef498 100644 --- a/frameworks/Java/officefloor/officefloor-undertow.dockerfile +++ b/frameworks/Java/officefloor/officefloor-undertow.dockerfile @@ -11,7 +11,5 @@ RUN mvn -B clean 
package FROM openjdk:15 WORKDIR /officefloor COPY --from=maven /officefloor/src/woof_benchmark_undertow/target/woof_benchmark_undertow-1.0.0.jar server.jar - EXPOSE 8080 - -CMD ["java", "-server", "-Xms2g", "-Xmx2g", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Dhttp.port=8080", "-Dhttp.server.name=OF", "-Dhttp.date.header=true", "-jar", "server.jar"] +CMD ["java", "-server", "-Xms2g", "-Xmx2g", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Dhttp.port=8080", "-Dhttp.server.name=O", "-Dhttp.date.header=true", "-jar", "server.jar"] diff --git a/frameworks/Java/officefloor/officefloor.dockerfile b/frameworks/Java/officefloor/officefloor.dockerfile index 96b86f363f3..e8c039bd83d 100644 --- a/frameworks/Java/officefloor/officefloor.dockerfile +++ b/frameworks/Java/officefloor/officefloor.dockerfile @@ -7,7 +7,5 @@ RUN mvn -B clean package FROM openjdk:15 WORKDIR /officefloor COPY --from=maven /officefloor/src/woof_benchmark/target/woof_benchmark-1.0.0.jar server.jar - EXPOSE 8080 - -CMD ["java", "-server", "-Xms2g", "-Xmx2g", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Dhttp.port=8080", "-Dhttp.server.name=OF", "-Dhttp.date.header=true", "-jar", "server.jar"] +CMD ["java", "-server", "-Xms2g", "-Xmx2g", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Dhttp.port=8080", "-Dhttp.server.name=O", "-Dhttp.date.header=true", "-jar", "server.jar"] diff --git a/frameworks/Java/officefloor/src/pom.xml b/frameworks/Java/officefloor/src/pom.xml index 07294fb1600..b45e0e99f61 100644 --- a/frameworks/Java/officefloor/src/pom.xml +++ b/frameworks/Java/officefloor/src/pom.xml @@ -39,7 +39,7 @@ net.officefloor bom - 3.30.2 + 3.31.0 pom import diff --git a/frameworks/Java/officefloor/src/woof_benchmark_raw/pom.xml b/frameworks/Java/officefloor/src/woof_benchmark_raw/pom.xml index b4a98f6a871..200c18ed680 100644 --- a/frameworks/Java/officefloor/src/woof_benchmark_raw/pom.xml +++ b/frameworks/Java/officefloor/src/woof_benchmark_raw/pom.xml @@ -29,10 +29,6 @@ io.r2dbc r2dbc-pool - - com.github.spullara.mustache.java - compiler - net.officefloor.web officeweb_executive diff --git a/frameworks/Java/officefloor/src/woof_benchmark_raw/src/main/java/net/officefloor/benchmark/RawOfficeFloorMain.java b/frameworks/Java/officefloor/src/woof_benchmark_raw/src/main/java/net/officefloor/benchmark/RawOfficeFloorMain.java index 505bf553713..7dadac67615 100644 --- a/frameworks/Java/officefloor/src/woof_benchmark_raw/src/main/java/net/officefloor/benchmark/RawOfficeFloorMain.java +++ b/frameworks/Java/officefloor/src/woof_benchmark_raw/src/main/java/net/officefloor/benchmark/RawOfficeFloorMain.java @@ -41,9 +41,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.module.afterburner.AfterburnerModule; -import com.github.mustachejava.DefaultMustacheFactory; -import com.github.mustachejava.Mustache; -import com.github.mustachejava.MustacheFactory; import io.r2dbc.pool.PoolingConnectionFactoryProvider; import io.r2dbc.spi.Batch; @@ -79,6 +76,7 @@ import net.officefloor.server.http.impl.ProcessAwareServerHttpConnectionManagedObject; import net.officefloor.server.http.parse.HttpRequestParser; import net.officefloor.server.http.parse.HttpRequestParser.HttpRequestParserMetaData; +import net.officefloor.server.stream.ServerWriter; import net.officefloor.server.stream.impl.ThreadLocalStreamBufferPool; import net.officefloor.web.executive.CpuCore; import net.officefloor.web.executive.CpuCore.LogicalCpu; @@ -222,7 +220,7 @@ private static class RawHttpServicerFactory extends AbstractHttpServicerFactory private static HttpHeaderName 
NAME_SERVER = new HttpHeaderName("Server"); - private static HttpHeaderValue VALUE_SERVER = new HttpHeaderValue("OF"); + private static HttpHeaderValue VALUE_SERVER = new HttpHeaderValue("O"); private static HttpHeaderName NAME_DATE = new HttpHeaderName("Date"); @@ -238,6 +236,18 @@ private static class RawHttpServicerFactory extends AbstractHttpServicerFactory private static final String UPDATE_PATH_PREFIX = "/update?queries="; + private static final byte[] TEMPLATE_START = "<!DOCTYPE html><html><head><title>Fortunes</title></head><body><table><tr><th>id</th><th>message</th></tr>" + .getBytes(ServerHttpConnection.DEFAULT_HTTP_ENTITY_CHARSET); + + private static final byte[] FORTUNE_START = "<tr><td>".getBytes(ServerHttpConnection.DEFAULT_HTTP_ENTITY_CHARSET); + + private static final byte[] TEMPLATE_END = "</table></body></html>".getBytes(ServerHttpConnection.DEFAULT_HTTP_ENTITY_CHARSET); + + private static final byte[] FORTUNE_MIDDLE = "</td><td>".getBytes(ServerHttpConnection.DEFAULT_HTTP_ENTITY_CHARSET); + + private static final byte[] FORTUNE_END = "</td></tr>
" + .getBytes(ServerHttpConnection.DEFAULT_HTTP_ENTITY_CHARSET); + private static final R2dbcTransientResourceException THROTTLED = new R2dbcTransientResourceException(); /** @@ -291,11 +301,6 @@ public R run(ProcessSafeOperation operation) thro */ private final ThreadLocal threadLocalRateLimit = new ThreadLocal(); - /** - * {@link Mustache} for /fortunes. - */ - private final Mustache fortuneMustache; - /** * Instantiate. * @@ -320,19 +325,6 @@ protected Connection[] initialValue() { return connections; } }; - - // Load the mustache fortunes template - MustacheFactory mustacheFactory = new DefaultMustacheFactory() { - @Override - public void encode(String value, Writer writer) { - try { - StringEscapeUtils.ESCAPE_HTML4.translate(value, writer); - } catch (IOException ex) { - ex.printStackTrace(); - } - } - }; - this.fortuneMustache = mustacheFactory.compile("fortunes.mustache"); } /** @@ -542,7 +534,17 @@ private void fortunes(HttpResponse response, // Send response response.setContentType(TEXT_HTML, null); - this.fortuneMustache.execute(response.getEntityWriter(), fortunes); + ServerWriter writer = response.getEntityWriter(); + writer.write(TEMPLATE_START); + for (Fortune fortune : fortunes) { + writer.write(FORTUNE_START); + int id = fortune.getId(); + writer.write(Integer.valueOf(id).toString()); + writer.write(FORTUNE_MIDDLE); + StringEscapeUtils.ESCAPE_HTML4.translate(fortune.getMessage(), writer); + writer.write(FORTUNE_END); + } + writer.write(TEMPLATE_END); this.send(connection); } catch (CancelledKeyException | ClosedChannelException ex) { // Ignore as disconnecting client diff --git a/frameworks/Java/officefloor/src/woof_benchmark_spring/pom.xml b/frameworks/Java/officefloor/src/woof_benchmark_spring/pom.xml index 88f1003ed1c..71121c55e8e 100644 --- a/frameworks/Java/officefloor/src/woof_benchmark_spring/pom.xml +++ b/frameworks/Java/officefloor/src/woof_benchmark_spring/pom.xml @@ -39,7 +39,7 @@ org.springframework.boot spring-boot-maven-plugin - 2.3.5.RELEASE + 2.4.2 net.officefloor.OfficeFloorMain exec diff --git a/frameworks/Java/smart-socket/pom.xml b/frameworks/Java/smart-socket/pom.xml index 684419d007a..20fc84bbe4c 100644 --- a/frameworks/Java/smart-socket/pom.xml +++ b/frameworks/Java/smart-socket/pom.xml @@ -11,8 +11,8 @@ 11 11 2.11.0 - 1.0.2 - 0.1.2-SNAPSHOT + 1.0.3-SNAPSHOT + 0.1.3-SNAPSHOT 1.5.5-SNAPSHOT diff --git a/frameworks/Java/smart-socket/src/main/java/org/smartboot/servlet/Bootstrap.java b/frameworks/Java/smart-socket/src/main/java/org/smartboot/servlet/Bootstrap.java index d550c8301ca..cff3ca28d73 100644 --- a/frameworks/Java/smart-socket/src/main/java/org/smartboot/servlet/Bootstrap.java +++ b/frameworks/Java/smart-socket/src/main/java/org/smartboot/servlet/Bootstrap.java @@ -1,9 +1,12 @@ package org.smartboot.servlet; import org.smartboot.aio.EnhanceAsynchronousChannelProvider; +import org.smartboot.http.HttpRequest; +import org.smartboot.http.HttpResponse; import org.smartboot.http.server.HttpMessageProcessor; import org.smartboot.http.server.HttpRequestProtocol; import org.smartboot.http.server.Request; +import org.smartboot.http.server.handle.HttpHandle; import org.smartboot.servlet.conf.ServletInfo; import org.smartboot.socket.StateMachineEnum; import org.smartboot.socket.buffer.BufferFactory; @@ -24,26 +27,30 @@ public class Bootstrap { public static void main(String[] args) { System.setProperty("java.nio.channels.spi.AsynchronousChannelProvider", EnhanceAsynchronousChannelProvider.class.getName()); - ServletHttpHandle httpHandle = new 
ServletHttpHandle(); - ContainerRuntime containerRuntime = new ContainerRuntime("/"); + ContainerRuntime containerRuntime = new ContainerRuntime(); // plaintext + ApplicationRuntime applicationRuntime = new ApplicationRuntime("/"); ServletInfo plainTextServletInfo = new ServletInfo(); plainTextServletInfo.setServletName("plaintext"); plainTextServletInfo.setServletClass(HelloWorldServlet.class.getName()); plainTextServletInfo.addMapping("/plaintext"); - containerRuntime.getDeploymentInfo().addServlet(plainTextServletInfo); + applicationRuntime.getDeploymentInfo().addServlet(plainTextServletInfo); // json ServletInfo jsonServletInfo = new ServletInfo(); jsonServletInfo.setServletName("json"); jsonServletInfo.setServletClass(HelloWorldServlet.class.getName()); jsonServletInfo.addMapping("/json"); - containerRuntime.getDeploymentInfo().addServlet(jsonServletInfo); - httpHandle.addRuntime(containerRuntime); - - httpHandle.start(); + applicationRuntime.getDeploymentInfo().addServlet(jsonServletInfo); + containerRuntime.addRuntime(applicationRuntime); + containerRuntime.start(); HttpMessageProcessor processor = new HttpMessageProcessor(); - processor.pipeline(httpHandle); + processor.pipeline(new HttpHandle() { + @Override + public void doHandle(HttpRequest request, HttpResponse response) throws IOException { + containerRuntime.doHandle(request, response); + } + }); http(processor); } diff --git a/frameworks/PHP/cakephp/cakephp.dockerfile b/frameworks/PHP/cakephp/cakephp.dockerfile index b64e0b386df..e4018b33de2 100644 --- a/frameworks/PHP/cakephp/cakephp.dockerfile +++ b/frameworks/PHP/cakephp/cakephp.dockerfile @@ -1,28 +1,27 @@ -FROM ubuntu:20.04 +FROM ubuntu:20.10 ARG DEBIAN_FRONTEND=noninteractive RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /dev/null RUN LC_ALL=C.UTF-8 add-apt-repository ppa:ondrej/php RUN apt-get update -yqq > /dev/null && \ - apt-get install -yqq nginx git unzip php7.4 php7.4-common php7.4-cli php7.4-fpm php7.4-mysql php7.4-xml php7.4-mbstring php7.0-mcrypt php7.4-intl > /dev/null + apt-get install -yqq nginx git unzip \ + php8.0-fpm php8.0-mysql php8.0-xml php8.0-mbstring php8.0-intl > /dev/null RUN apt-get install -yqq composer > /dev/null -COPY deploy/conf/* /etc/php/7.4/fpm/ -COPY deploy/conf/* /etc/php/7.4/cli/ +COPY deploy/conf/* /etc/php/8.0/fpm/ +COPY deploy/conf/* /etc/php/8.0/cli/ ADD ./ /cakephp WORKDIR /cakephp -RUN if [ $(nproc) = 2 ]; then sed -i "s|pm.max_children = 1024|pm.max_children = 512|g" /etc/php/7.4/fpm/php-fpm.conf ; fi; +RUN if [ $(nproc) = 2 ]; then sed -i "s|pm.max_children = 1024|pm.max_children = 512|g" /etc/php/8.0/fpm/php-fpm.conf ; fi; RUN composer install --optimize-autoloader --classmap-authoritative --no-dev --quiet RUN chmod -R 777 /cakephp -EXPOSE 8080 - -CMD service php7.4-fpm start && \ +CMD service php8.0-fpm start && \ nginx -c /cakephp/deploy/nginx.conf diff --git a/frameworks/PHP/cakephp/composer.json b/frameworks/PHP/cakephp/composer.json index c6def89c85c..dbb017036cf 100644 --- a/frameworks/PHP/cakephp/composer.json +++ b/frameworks/PHP/cakephp/composer.json @@ -5,8 +5,7 @@ "type": "project", "license": "MIT", "require": { - "php": ">=7.2", - "cakephp/cakephp": "^4.0.2", + "cakephp/cakephp": "^4.1", "cakephp/plugin-installer": "^1.0" }, "autoload": { @@ -15,7 +14,6 @@ } }, "scripts": { - "post-autoload-dump": "Cake\\Composer\\Installer\\PluginInstaller::postAutoloadDump", "check": [ "@test", "@cs-check" diff --git a/frameworks/PHP/comet/comet-mysql.dockerfile 
b/frameworks/PHP/comet/comet-mysql.dockerfile index 8085b954bdf..30b1d5c3e87 100644 --- a/frameworks/PHP/comet/comet-mysql.dockerfile +++ b/frameworks/PHP/comet/comet-mysql.dockerfile @@ -1,18 +1,18 @@ -FROM ubuntu:20.04 +FROM ubuntu:20.10 ARG DEBIAN_FRONTEND=noninteractive RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /dev/null RUN LC_ALL=C.UTF-8 add-apt-repository ppa:ondrej/php RUN apt-get update -yqq > /dev/null && \ - apt-get install -yqq php7.4 php7.4-common php7.4-cli php7.4-xml php7.4-mysql > /dev/null + apt-get install -yqq php8.0-cli php8.0-xml php8.0-mysql > /dev/null RUN apt-get install -yqq composer > /dev/null RUN apt-get install -y php-pear php-dev libevent-dev > /dev/null -RUN printf "\n\n /usr/lib/x86_64-linux-gnu/\n\n\nno\n\n\n" | pecl install event > /dev/null && echo "extension=event.so" > /etc/php/7.4/cli/conf.d/event.ini +RUN pecl install event-3.0.2 > /dev/null && echo "extension=event.so" > /etc/php/8.0/cli/conf.d/event.ini -COPY php.ini /etc/php/7.4/cli/php.ini +COPY php.ini /etc/php/8.0/cli/php.ini ADD ./ /comet WORKDIR /comet diff --git a/frameworks/PHP/comet/comet.dockerfile b/frameworks/PHP/comet/comet.dockerfile index cc9c4318a17..472a3799829 100644 --- a/frameworks/PHP/comet/comet.dockerfile +++ b/frameworks/PHP/comet/comet.dockerfile @@ -1,24 +1,22 @@ -FROM ubuntu:20.04 +FROM ubuntu:20.10 ARG DEBIAN_FRONTEND=noninteractive RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /dev/null RUN LC_ALL=C.UTF-8 add-apt-repository ppa:ondrej/php RUN apt-get update -yqq > /dev/null && \ - apt-get install -yqq php7.4 php7.4-common php7.4-cli php7.4-pgsql php7.4-xml > /dev/null + apt-get install -yqq php8.0-cli php8.0-pgsql php8.0-xml > /dev/null RUN apt-get install -yqq composer > /dev/null RUN apt-get install -y php-pear php-dev libevent-dev > /dev/null -RUN printf "\n\n /usr/lib/x86_64-linux-gnu/\n\n\nno\n\n\n" | pecl install event > /dev/null && echo "extension=event.so" > /etc/php/7.4/cli/conf.d/event.ini +RUN pecl install event-3.0.2 > /dev/null && echo "extension=event.so" > /etc/php/8.0/cli/conf.d/event.ini -COPY php.ini /etc/php/7.4/cli/php.ini +COPY php.ini /etc/php/8.0/cli/php.ini ADD ./ /comet WORKDIR /comet RUN composer install --optimize-autoloader --classmap-authoritative --no-dev --quiet -EXPOSE 8080 - CMD php /comet/app.php start diff --git a/frameworks/PHP/comet/php.ini b/frameworks/PHP/comet/php.ini index e12bbd2fb0c..f0c616f9fb2 100644 --- a/frameworks/PHP/comet/php.ini +++ b/frameworks/PHP/comet/php.ini @@ -8,3 +8,6 @@ opcache.huge_code_pages=1 mysqlnd.collect_statistics = Off memory_limit = 512M + +opcache.jit_buffer_size=128M +opcache.jit=tracing diff --git a/frameworks/PHP/comet/src/Controllers/QueryController.php b/frameworks/PHP/comet/src/Controllers/QueryController.php index b59fde14468..f1eb49d3bf4 100644 --- a/frameworks/PHP/comet/src/Controllers/QueryController.php +++ b/frameworks/PHP/comet/src/Controllers/QueryController.php @@ -13,7 +13,7 @@ class QueryController public function __invoke(Request $request, Response $response, $args) { $queryParams = $request->getQueryParams(); - $q = $queryParams['q'] ?? 0; + $q = (int) $queryParams['q'] ?? 0; $query_count = $q > 1 ? 
min($q, 500) : 1; while ($query_count--) { diff --git a/frameworks/PHP/comet/src/Controllers/UpdateController.php b/frameworks/PHP/comet/src/Controllers/UpdateController.php index c4f8d2eef32..05effe7cd59 100644 --- a/frameworks/PHP/comet/src/Controllers/UpdateController.php +++ b/frameworks/PHP/comet/src/Controllers/UpdateController.php @@ -13,7 +13,7 @@ class UpdateController public function __invoke(Request $request, Response $response, $args) { $queryParams = $request->getQueryParams(); - $q = $queryParams['q'] ?? 0; + $q = (int) $queryParams['q'] ?? 0; $query_count = $q > 1 ? min($q, 500) : 1; // TODO Speedup with signle transaction? diff --git a/frameworks/PHP/duckphp/app/Controller/Main.php b/frameworks/PHP/duckphp/app/Controller/Main.php index 7232446b780..8ed39d2f825 100644 --- a/frameworks/PHP/duckphp/app/Controller/Main.php +++ b/frameworks/PHP/duckphp/app/Controller/Main.php @@ -36,7 +36,7 @@ public function db() } public function updates() { - $queries = C::GET('queries',1); + $queries = (int) C::GET('queries',1); $query_count = 1; if ($queries > 1) { $query_count = min($queries, 500); @@ -47,7 +47,7 @@ public function updates() } public function queries() { - $queries = C::GET('queries',1); + $queries = (int) C::GET('queries',1); $query_count = 1; if ($queries > 1) { $query_count = min($queries, 500); diff --git a/frameworks/PHP/duckphp/app/Model/FortuneModel.php b/frameworks/PHP/duckphp/app/Model/FortuneModel.php index 52cb94dac3d..217a202b212 100644 --- a/frameworks/PHP/duckphp/app/Model/FortuneModel.php +++ b/frameworks/PHP/duckphp/app/Model/FortuneModel.php @@ -15,7 +15,7 @@ public function getFortunes() { $sql = 'SELECT id, message FROM fortune'; $data = M::DB()->fetchAll($sql); - $ret = array_combine(array_column($data,'id'),array_column($data,'message')); + $ret = array_column($data, 'message', 'id'); $ret[0] = 'Additional fortune added at request time.'; asort($ret); return $ret; diff --git a/frameworks/PHP/duckphp/app/System/BaseBusiness.php b/frameworks/PHP/duckphp/app/System/BaseBusiness.php index 2d36d7d43d7..eeb10903f83 100644 --- a/frameworks/PHP/duckphp/app/System/BaseBusiness.php +++ b/frameworks/PHP/duckphp/app/System/BaseBusiness.php @@ -6,9 +6,9 @@ namespace DuckPhpBenchmark\System; -use DuckPhp\SingletonEx\SingletonEx; +use DuckPhp\SingletonEx\SingletonExTrait; class BaseBusiness { - use SingletonEx; + use SingletonExTrait; } diff --git a/frameworks/PHP/duckphp/app/System/BaseModel.php b/frameworks/PHP/duckphp/app/System/BaseModel.php index 9b3bac30826..f66f2f0ae77 100644 --- a/frameworks/PHP/duckphp/app/System/BaseModel.php +++ b/frameworks/PHP/duckphp/app/System/BaseModel.php @@ -6,10 +6,10 @@ namespace DuckPhpBenchmark\System; -use DuckPhp\SingletonEx\SingletonEx; +use DuckPhp\SingletonEx\SingletonExTrait; class BaseModel { - use SingletonEx; + use SingletonExTrait; // override or add your code here } diff --git a/frameworks/PHP/duckphp/composer.json b/frameworks/PHP/duckphp/composer.json index 53287ccd044..c4f3f950d78 100644 --- a/frameworks/PHP/duckphp/composer.json +++ b/frameworks/PHP/duckphp/composer.json @@ -2,8 +2,7 @@ "name": "dvaknheo/duckphp-benchmark", "type": "project", "require" : { - "php" : "^7.2", - "dvaknheo/duckphp" : "1.2.7" + "dvaknheo/duckphp" : "1.2.10" }, "autoload" : { "psr-4" : { diff --git a/frameworks/PHP/duckphp/deploy/conf/php-fpm.conf b/frameworks/PHP/duckphp/deploy/conf/php-fpm.conf index 39d756209dc..0f1180c79eb 100644 --- a/frameworks/PHP/duckphp/deploy/conf/php-fpm.conf +++ 
b/frameworks/PHP/duckphp/deploy/conf/php-fpm.conf @@ -14,14 +14,13 @@ ; Pid file ; Note: the default prefix is /var ; Default Value: none -pid = /run/php/php7.4-fpm.pid +pid = /run/php/php-fpm.pid ; Error log file ; If it's set to "syslog", log is sent to syslogd instead of being written ; into a local file. ; Note: the default prefix is /var ; Default Value: log/php-fpm.log -;error_log = /var/log/php7.4-fpm.log error_log = /dev/stderr @@ -161,7 +160,7 @@ group = www-data ; (IPv6 and IPv4-mapped) on a specific port; ; '/path/to/unix/socket' - to listen on a unix socket. ; Note: This value is mandatory. -listen = /var/run/php/php7.4-fpm.sock +listen = /var/run/php/php-fpm.sock ; Set listen(2) backlog. ; Default Value: 511 (-1 on FreeBSD and OpenBSD) diff --git a/frameworks/PHP/duckphp/deploy/nginx.conf b/frameworks/PHP/duckphp/deploy/nginx.conf index 99662e6e2be..3c0e7cf3d6f 100644 --- a/frameworks/PHP/duckphp/deploy/nginx.conf +++ b/frameworks/PHP/duckphp/deploy/nginx.conf @@ -2,6 +2,7 @@ user www-data; worker_processes auto; error_log stderr error; worker_rlimit_nofile 200000; +daemon off; events { worker_connections 16384; @@ -40,7 +41,7 @@ http { upstream fastcgi_backend { - server unix:/var/run/php/php7.4-fpm.sock; + server unix:/var/run/php/php-fpm.sock; keepalive 50; } diff --git a/frameworks/PHP/duckphp/duckphp.dockerfile b/frameworks/PHP/duckphp/duckphp.dockerfile index c156528e6c9..1028cdf8bc0 100644 --- a/frameworks/PHP/duckphp/duckphp.dockerfile +++ b/frameworks/PHP/duckphp/duckphp.dockerfile @@ -1,24 +1,23 @@ -FROM ubuntu:20.04 +FROM ubuntu:20.10 ARG DEBIAN_FRONTEND=noninteractive RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /dev/null RUN LC_ALL=C.UTF-8 add-apt-repository ppa:ondrej/php RUN apt-get update -yqq > /dev/null && \ - apt-get install -yqq nginx git unzip php7.4 php7.4-common php7.4-cli php7.4-fpm php7.4-mysql > /dev/null + apt-get install -yqq nginx git unzip \ + php8.0-cli php8.0-fpm php8.0-mysql > /dev/null RUN apt-get install -yqq composer > /dev/null -COPY deploy/conf/* /etc/php/7.4/fpm/ +COPY deploy/conf/* /etc/php/8.0/fpm/ ADD ./ /duckphp WORKDIR /duckphp -RUN if [ $(nproc) = 2 ]; then sed -i "s|pm.max_children = 1024|pm.max_children = 512|g" /etc/php/7.4/fpm/php-fpm.conf ; fi; +RUN if [ $(nproc) = 2 ]; then sed -i "s|pm.max_children = 1024|pm.max_children = 512|g" /etc/php/8.0/fpm/php-fpm.conf ; fi; RUN composer install --optimize-autoloader --classmap-authoritative --no-dev --quiet -EXPOSE 8080 - -CMD service php7.4-fpm start && \ - nginx -c /duckphp/deploy/nginx.conf -g "daemon off;" \ No newline at end of file +CMD service php8.0-fpm start && \ + nginx -c /duckphp/deploy/nginx.conf diff --git a/frameworks/PHP/hyperf/app/Render.php b/frameworks/PHP/hyperf/app/Render.php index eea61fbf619..bc3919a3186 100644 --- a/frameworks/PHP/hyperf/app/Render.php +++ b/frameworks/PHP/hyperf/app/Render.php @@ -8,10 +8,11 @@ use Hyperf\Task\TaskExecutor; use Hyperf\View\Engine\EngineInterface; use Hyperf\View\Mode; +use Psr\Http\Message\ResponseInterface; class Render extends \Hyperf\View\Render { - public function render(string $template, array $data) + public function render(string $template, array $data = []): ResponseInterface { switch ($this->mode) { case Mode::SYNC: @@ -29,4 +30,4 @@ public function render(string $template, array $data) } -} \ No newline at end of file +} diff --git a/frameworks/PHP/kumbiaphp/bench/app/controllers/ku_controller.php b/frameworks/PHP/kumbiaphp/bench/app/controllers/ku_controller.php index 
4c19fa3676a..0c545d23794 100644 --- a/frameworks/PHP/kumbiaphp/bench/app/controllers/ku_controller.php +++ b/frameworks/PHP/kumbiaphp/bench/app/controllers/ku_controller.php @@ -13,8 +13,8 @@ protected function before_filter() public function index() { - KuRaw::$db->execute([mt_rand(1, 10000)]); - echo json_encode(KuRaw::$db->fetch()); + KuRaw::$random->execute([mt_rand(1, 10000)]); + echo json_encode(KuRaw::$random->fetch()); } public function query($count = 1) @@ -22,8 +22,8 @@ public function query($count = 1) $count = min(max((int) $count, 1), 500); while ($count--) { - KuRaw::$db->execute([mt_rand(1, 10000)]); - $worlds[] = KuRaw::$db->fetch(); + KuRaw::$random->execute([mt_rand(1, 10000)]); + $worlds[] = KuRaw::$random->fetch(); } echo json_encode($worlds); } @@ -33,14 +33,14 @@ public function update($count = 1) $count = min(max((int) $count, 1), 500); while ($count--) { - $id = mt_rand(1, 10000); - KuRaw::$random->execute([$id]); - $row = ['id' => $id, 'randomNumber' => KuRaw::$random->fetchColumn()]; + KuRaw::$random->execute([mt_rand(1, 10000)]); + $row = KuRaw::$random->fetch(); $row['randomNumber'] = mt_rand(1, 10000); $worlds[] = $row; } + KuRaw::update($worlds); echo json_encode($worlds); diff --git a/frameworks/PHP/kumbiaphp/bench/app/libs/ku_raw.php b/frameworks/PHP/kumbiaphp/bench/app/libs/ku_raw.php index a691f364011..7e038d4fe87 100644 --- a/frameworks/PHP/kumbiaphp/bench/app/libs/ku_raw.php +++ b/frameworks/PHP/kumbiaphp/bench/app/libs/ku_raw.php @@ -24,7 +24,6 @@ public static function init() ] ); - self::$db = $pdo->prepare('SELECT id,randomNumber FROM World WHERE id = ?'); self::$fortune = $pdo->prepare('SELECT id,message FROM Fortune'); self::$random = $pdo->prepare('SELECT id,randomNumber FROM World WHERE id = ?'); self::$instance = $pdo; @@ -42,9 +41,9 @@ public static function update(array $worlds) if (!isset(self::$update[$rows])) { $sql = 'UPDATE world SET randomNumber = CASE id' - . str_repeat(' WHEN ?::INTEGER THEN ?::INTEGER ', $rows) . - 'END WHERE id IN (' - . implode(', ', array_fill(0, $rows, '?::INTEGER')) . ')'; + . str_repeat(' WHEN ?::INTEGER THEN ?::INTEGER ', $rows) + . 'END WHERE id IN (' + . str_repeat('?::INTEGER,', $rows - 1) . 
'?::INTEGER)'; self::$update[$rows] = self::$instance->prepare($sql); } diff --git a/frameworks/PHP/kumbiaphp/kumbiaphp-raw.dockerfile b/frameworks/PHP/kumbiaphp/kumbiaphp-raw.dockerfile index 3bcb46bd48a..0bce6ecdb3b 100644 --- a/frameworks/PHP/kumbiaphp/kumbiaphp-raw.dockerfile +++ b/frameworks/PHP/kumbiaphp/kumbiaphp-raw.dockerfile @@ -5,7 +5,8 @@ ARG DEBIAN_FRONTEND=noninteractive RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /dev/null RUN LC_ALL=C.UTF-8 add-apt-repository ppa:ondrej/php RUN apt-get update -yqq > /dev/null && \ - apt-get install -yqq nginx git unzip php8.0 php8.0-common php8.0-cli php8.0-fpm php8.0-mysql > /dev/null + apt-get install -yqq nginx git unzip \ + php8.0-fpm php8.0-mysql > /dev/null COPY deploy/conf/* /etc/php/8.0/fpm/ diff --git a/frameworks/PHP/kumbiaphp/kumbiaphp.dockerfile b/frameworks/PHP/kumbiaphp/kumbiaphp.dockerfile index fd3421563ac..8701b2db3f6 100644 --- a/frameworks/PHP/kumbiaphp/kumbiaphp.dockerfile +++ b/frameworks/PHP/kumbiaphp/kumbiaphp.dockerfile @@ -5,7 +5,8 @@ ARG DEBIAN_FRONTEND=noninteractive RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /dev/null RUN LC_ALL=C.UTF-8 add-apt-repository ppa:ondrej/php RUN apt-get update -yqq > /dev/null && \ - apt-get install -yqq nginx git unzip php8.0 php8.0-common php8.0-cli php8.0-fpm php8.0-mysql > /dev/null + apt-get install -yqq nginx git unzip \ + php8.0-fpm php8.0-mysql > /dev/null COPY deploy/conf/* /etc/php/8.0/fpm/ diff --git a/frameworks/PHP/laravel/laravel-laravel-s.dockerfile b/frameworks/PHP/laravel/laravel-laravel-s.dockerfile index ff3457f4685..32325c2fff3 100644 --- a/frameworks/PHP/laravel/laravel-laravel-s.dockerfile +++ b/frameworks/PHP/laravel/laravel-laravel-s.dockerfile @@ -1,12 +1,12 @@ -FROM php:8.0 +FROM php:8.0-cli RUN pecl install swoole > /dev/null && \ docker-php-ext-enable swoole RUN docker-php-ext-install pdo_mysql pcntl opcache > /dev/null RUN echo "opcache.enable_cli=1" >> /usr/local/etc/php/conf.d/docker-php-ext-opcache.ini -RUN echo "opcache.jit=1205" >> /usr/local/etc/php/conf.d/docker-php-ext-opcache.ini -RUN echo "opcache.jit_buffer_size=128M" >> /usr/local/etc/php/conf.d/docker-php-ext-opcache.ini +#RUN echo "opcache.jit=1205" >> /usr/local/etc/php/conf.d/docker-php-ext-opcache.ini +#RUN echo "opcache.jit_buffer_size=128M" >> /usr/local/etc/php/conf.d/docker-php-ext-opcache.ini ADD ./ /laravel WORKDIR /laravel diff --git a/frameworks/PHP/laravel/laravel.dockerfile b/frameworks/PHP/laravel/laravel.dockerfile index d5111a2342c..06a32025277 100644 --- a/frameworks/PHP/laravel/laravel.dockerfile +++ b/frameworks/PHP/laravel/laravel.dockerfile @@ -5,7 +5,8 @@ ARG DEBIAN_FRONTEND=noninteractive RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /dev/null RUN LC_ALL=C.UTF-8 add-apt-repository ppa:ondrej/php RUN apt-get update -yqq > /dev/null && \ - apt-get install -yqq nginx git unzip php8.0 php8.0-common php8.0-cli php8.0-fpm php8.0-mysql > /dev/null + apt-get install -yqq nginx git unzip \ + php8.0-cli php8.0-fpm php8.0-mysql > /dev/null RUN apt-get install -yqq php8.0-mbstring php8.0-xml > /dev/null RUN apt-get install -yqq composer > /dev/null diff --git a/frameworks/PHP/lumen/lumen-laravel-s.dockerfile b/frameworks/PHP/lumen/lumen-laravel-s.dockerfile index 9da385804fb..e30883a2e0a 100644 --- a/frameworks/PHP/lumen/lumen-laravel-s.dockerfile +++ b/frameworks/PHP/lumen/lumen-laravel-s.dockerfile @@ -1,12 +1,12 @@ -FROM php:8.0 +FROM php:8.0-cli RUN pecl install swoole > /dev/null 
&& \ docker-php-ext-enable swoole RUN docker-php-ext-install pdo_mysql pcntl opcache > /dev/null RUN echo "opcache.enable_cli=1" >> /usr/local/etc/php/conf.d/docker-php-ext-opcache.ini -RUN echo "opcache.jit=1205" >> /usr/local/etc/php/conf.d/docker-php-ext-opcache.ini -RUN echo "opcache.jit_buffer_size=128M" >> /usr/local/etc/php/conf.d/docker-php-ext-opcache.ini +#RUN echo "opcache.jit=1205" >> /usr/local/etc/php/conf.d/docker-php-ext-opcache.ini +#RUN echo "opcache.jit_buffer_size=128M" >> /usr/local/etc/php/conf.d/docker-php-ext-opcache.ini ADD ./ /lumen WORKDIR /lumen diff --git a/frameworks/PHP/lumen/lumen.dockerfile b/frameworks/PHP/lumen/lumen.dockerfile index fb4c92e346a..3b82aed44dc 100644 --- a/frameworks/PHP/lumen/lumen.dockerfile +++ b/frameworks/PHP/lumen/lumen.dockerfile @@ -5,7 +5,8 @@ ARG DEBIAN_FRONTEND=noninteractive RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /dev/null RUN LC_ALL=C.UTF-8 add-apt-repository ppa:ondrej/php RUN apt-get update -yqq > /dev/null && \ - apt-get install -yqq nginx git unzip php8.0 php8.0-common php8.0-cli php8.0-fpm php8.0-mysql > /dev/null + apt-get install -yqq nginx git unzip \ + php8.0-cli php8.0-fpm php8.0-mysql > /dev/null RUN apt-get install -yqq php8.0-mbstring php8.0-xml > /dev/null RUN apt-get install -yqq composer > /dev/null diff --git a/frameworks/PHP/peachpie/Server/Server.csproj b/frameworks/PHP/peachpie/Server/Server.csproj index 925f3e840f9..a366eeed446 100644 --- a/frameworks/PHP/peachpie/Server/Server.csproj +++ b/frameworks/PHP/peachpie/Server/Server.csproj @@ -1,7 +1,7 @@ - net5.0 + net50 Exe diff --git a/frameworks/PHP/peachpie/global.json b/frameworks/PHP/peachpie/global.json index 5e70e12d61a..166cbe6e9b7 100644 --- a/frameworks/PHP/peachpie/global.json +++ b/frameworks/PHP/peachpie/global.json @@ -1,5 +1,5 @@ { "msbuild-sdks": { - "Peachpie.NET.Sdk": "1.0.0-appv4128" + "Peachpie.NET.Sdk": "1.0.0-preview5" } } \ No newline at end of file diff --git a/frameworks/PHP/php-ngx/app-async.php b/frameworks/PHP/php-ngx/app-async.php index 427b27241bd..80b78b39eb7 100644 --- a/frameworks/PHP/php-ngx/app-async.php +++ b/frameworks/PHP/php-ngx/app-async.php @@ -37,7 +37,7 @@ function query() $my = new php\ngx\mysql(); yield from $my->connect(DB_HOST, DB_PORT, DB_USER, DB_PASS, DB_NAME); $query_count = 1; - $params = ngx::query_args()['q']; + $params = (int) ngx::query_args()['q']; if ($params > 1) { $query_count = min($params, 500); } @@ -68,7 +68,7 @@ function update() $my = new php\ngx\mysql(); yield from $my->connect(DB_HOST, DB_PORT, DB_USER, DB_PASS, DB_NAME); $query_count = 1; - $params = ngx::query_args()['q']; + $params = (int) ngx::query_args()['q']; if ($params > 1) { $query_count = min($params, 500); } diff --git a/frameworks/PHP/php-ngx/app.php b/frameworks/PHP/php-ngx/app.php index 53998cf3d94..809fc19279a 100644 --- a/frameworks/PHP/php-ngx/app.php +++ b/frameworks/PHP/php-ngx/app.php @@ -24,7 +24,7 @@ function query() ngx_header_set('Content-Type', 'application/json'); $query_count = 1; - $params = ngx::query_args()['q']; + $params = (int) ngx::query_args()['q']; if ($params > 1) { $query_count = min($params, 500); } @@ -42,7 +42,7 @@ function update() ngx_header_set('Content-Type', 'application/json'); $query_count = 1; - $params = ngx::query_args()['q']; + $params = (int) ngx::query_args()['q']; if ($params > 1) { $query_count = min($params, 500); } diff --git a/frameworks/PHP/php-ngx/php-ngx-async.dockerfile b/frameworks/PHP/php-ngx/php-ngx-async.dockerfile index 
0f8bd9ee9e6..37cb2f442cd 100644 --- a/frameworks/PHP/php-ngx/php-ngx-async.dockerfile +++ b/frameworks/PHP/php-ngx/php-ngx-async.dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:20.04 +FROM ubuntu:20.10 ARG DEBIAN_FRONTEND=noninteractive @@ -6,20 +6,20 @@ RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /de RUN LC_ALL=C.UTF-8 add-apt-repository ppa:ondrej/php > /dev/null RUN apt-get update -yqq > /dev/null && \ apt-get install -yqq wget git unzip libxml2-dev cmake make systemtap-sdt-dev \ - zlibc zlib1g zlib1g-dev libpcre3 libpcre3-dev libargon2-0-dev libsodium-dev \ - php7.4 php7.4-common php7.4-dev libphp7.4-embed php7.4-mysql nginx > /dev/null + zlib1g-dev libpcre3-dev libargon2-0-dev libsodium-dev \ + php8.0-cli php8.0-dev libphp8.0-embed php8.0-mysql nginx > /dev/null ADD ./ ./ -ENV NGINX_VERSION=1.19.2 +ENV NGINX_VERSION=1.19.6 -RUN git clone -b v0.0.24 --single-branch --depth 1 https://github.com/rryqszq4/ngx_php7.git > /dev/null +RUN git clone -b v0.0.25 --single-branch --depth 1 https://github.com/rryqszq4/ngx_php7.git > /dev/null RUN wget -q http://nginx.org/download/nginx-${NGINX_VERSION}.tar.gz && \ tar -zxf nginx-${NGINX_VERSION}.tar.gz && \ cd nginx-${NGINX_VERSION} && \ - export PHP_LIB=/usr/lib && \ - ./configure --user=www --group=www \ + export PHP_LIB=/usr/lib && \ + bash ./configure --user=www --group=www \ --prefix=/nginx \ --with-ld-opt="-Wl,-rpath,$PHP_LIB" \ --add-module=/ngx_php7/third_party/ngx_devel_kit \ diff --git a/frameworks/PHP/php-ngx/php-ngx-mysql.dockerfile b/frameworks/PHP/php-ngx/php-ngx-mysql.dockerfile index 66f73fa9551..29348083cf8 100644 --- a/frameworks/PHP/php-ngx/php-ngx-mysql.dockerfile +++ b/frameworks/PHP/php-ngx/php-ngx-mysql.dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:20.04 +FROM ubuntu:20.10 ARG DEBIAN_FRONTEND=noninteractive @@ -6,20 +6,20 @@ RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /de RUN LC_ALL=C.UTF-8 add-apt-repository ppa:ondrej/php > /dev/null RUN apt-get update -yqq > /dev/null && \ apt-get install -yqq wget git unzip libxml2-dev cmake make systemtap-sdt-dev \ - zlibc zlib1g zlib1g-dev libpcre3 libpcre3-dev libargon2-0-dev libsodium-dev \ - php7.4 php7.4-common php7.4-dev libphp7.4-embed php7.4-mysql nginx > /dev/null + zlib1g-dev libpcre3-dev libargon2-0-dev libsodium-dev \ + php8.0-cli php8.0-dev libphp8.0-embed php8.0-mysql nginx > /dev/null ADD ./ ./ -ENV NGINX_VERSION=1.19.2 +ENV NGINX_VERSION=1.19.6 -RUN git clone -b v0.0.24 --single-branch --depth 1 https://github.com/rryqszq4/ngx_php7.git > /dev/null +RUN git clone -b v0.0.25 --single-branch --depth 1 https://github.com/rryqszq4/ngx_php7.git > /dev/null RUN wget -q http://nginx.org/download/nginx-${NGINX_VERSION}.tar.gz && \ tar -zxf nginx-${NGINX_VERSION}.tar.gz && \ cd nginx-${NGINX_VERSION} && \ - export PHP_LIB=/usr/lib && \ - ./configure --user=www --group=www \ + export PHP_LIB=/usr/lib && \ + bash ./configure --user=www --group=www \ --prefix=/nginx \ --with-ld-opt="-Wl,-rpath,$PHP_LIB" \ --add-module=/ngx_php7/third_party/ngx_devel_kit \ diff --git a/frameworks/PHP/php-ngx/php-ngx-pgsql.dockerfile b/frameworks/PHP/php-ngx/php-ngx-pgsql.dockerfile index 2fc781475be..2e2080bdf4e 100644 --- a/frameworks/PHP/php-ngx/php-ngx-pgsql.dockerfile +++ b/frameworks/PHP/php-ngx/php-ngx-pgsql.dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:20.04 +FROM ubuntu:20.10 ARG DEBIAN_FRONTEND=noninteractive @@ -6,20 +6,20 @@ RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /de RUN LC_ALL=C.UTF-8 add-apt-repository 
ppa:ondrej/php > /dev/null RUN apt-get update -yqq > /dev/null && \ apt-get install -yqq wget git unzip libxml2-dev cmake make systemtap-sdt-dev \ - zlibc zlib1g zlib1g-dev libpcre3 libpcre3-dev libargon2-0-dev libsodium-dev \ - php7.4 php7.4-common php7.4-dev libphp7.4-embed php7.4-pgsql nginx > /dev/null + zlib1g-dev libpcre3-dev libargon2-0-dev libsodium-dev \ + php8.0-cli php8.0-dev libphp8.0-embed php8.0-pgsql nginx > /dev/null ADD ./ ./ -ENV NGINX_VERSION=1.19.2 +ENV NGINX_VERSION=1.19.6 -RUN git clone -b v0.0.24 --single-branch --depth 1 https://github.com/rryqszq4/ngx_php7.git > /dev/null +RUN git clone -b v0.0.25 --single-branch --depth 1 https://github.com/rryqszq4/ngx_php7.git > /dev/null RUN wget -q http://nginx.org/download/nginx-${NGINX_VERSION}.tar.gz && \ tar -zxf nginx-${NGINX_VERSION}.tar.gz && \ cd nginx-${NGINX_VERSION} && \ - export PHP_LIB=/usr/lib && \ - ./configure --user=www --group=www \ + export PHP_LIB=/usr/lib && \ + bash ./configure --user=www --group=www \ --prefix=/nginx \ --with-ld-opt="-Wl,-rpath,$PHP_LIB" \ --add-module=/ngx_php7/third_party/ngx_devel_kit \ diff --git a/frameworks/PHP/php-ngx/php-ngx.dockerfile b/frameworks/PHP/php-ngx/php-ngx.dockerfile index 4714e2a2715..8e40d7f2c5a 100644 --- a/frameworks/PHP/php-ngx/php-ngx.dockerfile +++ b/frameworks/PHP/php-ngx/php-ngx.dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:20.04 +FROM ubuntu:20.10 ARG DEBIAN_FRONTEND=noninteractive @@ -6,20 +6,19 @@ RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /de RUN LC_ALL=C.UTF-8 add-apt-repository ppa:ondrej/php > /dev/null RUN apt-get update -yqq > /dev/null && \ apt-get install -yqq wget git unzip libxml2-dev cmake make systemtap-sdt-dev \ - zlibc zlib1g zlib1g-dev libpcre3 libpcre3-dev libargon2-0-dev libsodium-dev \ - php7.4 php7.4-common php7.4-dev libphp7.4-embed php7.4-mysql nginx > /dev/null - + zlib1g-dev libpcre3-dev libargon2-0-dev libsodium-dev \ + php8.0-cli php8.0-dev libphp8.0-embed php8.0-mysql nginx > /dev/null ADD ./ ./ -ENV NGINX_VERSION=1.19.2 +ENV NGINX_VERSION=1.19.6 -RUN git clone -b v0.0.24 --single-branch --depth 1 https://github.com/rryqszq4/ngx_php7.git > /dev/null +RUN git clone -b v0.0.25 --single-branch --depth 1 https://github.com/rryqszq4/ngx_php7.git > /dev/null RUN wget -q http://nginx.org/download/nginx-${NGINX_VERSION}.tar.gz && \ tar -zxf nginx-${NGINX_VERSION}.tar.gz && \ cd nginx-${NGINX_VERSION} && \ export PHP_LIB=/usr/lib && \ - ./configure --user=www --group=www \ + bash ./configure --user=www --group=www \ --prefix=/nginx \ --with-ld-opt="-Wl,-rpath,$PHP_LIB" \ --add-module=/ngx_php7/third_party/ngx_devel_kit \ diff --git a/frameworks/PHP/swoole/swoole-postgres.dockerfile b/frameworks/PHP/swoole/swoole-postgres.dockerfile index 0ab42af057f..2ef3b63f412 100644 --- a/frameworks/PHP/swoole/swoole-postgres.dockerfile +++ b/frameworks/PHP/swoole/swoole-postgres.dockerfile @@ -1,8 +1,9 @@ -FROM php:7.4 +FROM php:8.0-cli RUN docker-php-ext-install opcache > /dev/null -ENV SWOOLE_VERSION=4.5.1 +ENV SWOOLE_VERSION 4.6.1 +ENV SWOOLE_POSTGRES 4.6.1 RUN apt-get update && apt-get install -y libpq-dev \ && cd /tmp && curl -sSL "https://github.com/swoole/swoole-src/archive/v${SWOOLE_VERSION}.tar.gz" | tar xzf - \ @@ -10,8 +11,8 @@ RUN apt-get update && apt-get install -y libpq-dev \ && phpize && ./configure > /dev/null && make > /dev/null && make install > /dev/null \ && docker-php-ext-enable swoole -RUN cd /tmp && curl -sSL "https://github.com/swoole/ext-postgresql/archive/v${SWOOLE_VERSION}.tar.gz" | tar xzf - \ - 
&& cd ext-postgresql-${SWOOLE_VERSION} \ +RUN cd /tmp && curl -sSL "https://github.com/swoole/ext-postgresql/archive/v${SWOOLE_POSTGRES}.tar.gz" | tar xzf - \ + && cd ext-postgresql-${SWOOLE_POSTGRES} \ && phpize && ./configure > /dev/null && make > /dev/null && make install > /dev/null \ && docker-php-ext-enable swoole_postgresql diff --git a/frameworks/PHP/ubiquity/benchmark_config.json b/frameworks/PHP/ubiquity/benchmark_config.json index 99e8203a425..69e006ecb8c 100644 --- a/frameworks/PHP/ubiquity/benchmark_config.json +++ b/frameworks/PHP/ubiquity/benchmark_config.json @@ -15,7 +15,7 @@ "database": "MySQL", "framework": "ubiquity", "language": "PHP", - "flavor": "PHP7", + "flavor": "PHP8", "orm": "Full", "platform": "FPM/FastCGI", "webserver": "nginx", @@ -26,8 +26,6 @@ "versus": "php" }, "workerman": { - "json_url": "/Json_", - "plaintext_url": "/Plaintext_", "db_url": "/Db_", "query_url": "/Db_/query/", "fortune_url": "/Fortunes_", @@ -39,7 +37,7 @@ "database": "Postgres", "framework": "Ubiquity", "language": "PHP", - "flavor": "PHP7", + "flavor": "PHP8", "orm": "Full", "platform": "workerman", "webserver": "none", @@ -114,7 +112,8 @@ "database_os": "Linux", "display_name": "ubiquity-roadrunner", "notes": "", - "versus": "php" + "versus": "php", + "tags": ["broken"] }, "roadrunner-mysql": { "db_url": "/DbMy", @@ -135,7 +134,8 @@ "database_os": "Linux", "display_name": "ubiquity-roadrunner-mysql", "notes": "", - "versus": "php" + "versus": "php", + "tags": ["broken"] }, "workerman-mysql": { "db_url": "/DbMy", @@ -148,7 +148,7 @@ "database": "Mysql", "framework": "Ubiquity", "language": "PHP", - "flavor": "PHP7", + "flavor": "PHP8", "orm": "Full", "platform": "workerman", "webserver": "none", @@ -169,7 +169,7 @@ "database": "MongoDB", "framework": "Ubiquity", "language": "PHP", - "flavor": "PHP7", + "flavor": "PHP8", "orm": "Full", "platform": "workerman", "webserver": "none", @@ -179,6 +179,29 @@ "notes": "", "versus": "php" }, + "workerman-raw": { + "json_url": "/Json_", + "plaintext_url": "/Plaintext_", + "db_url": "/DbRaw", + "query_url": "/DbRaw/query/", + "fortune_url": "/FortunesRaw", + "update_url": "/DbRaw/update/", + "port": 8080, + "approach": "Realistic", + "classification": "Fullstack", + "database": "Postgres", + "framework": "Ubiquity", + "language": "PHP", + "flavor": "PHP8", + "orm": "raw", + "platform": "workerman", + "webserver": "none", + "os": "Linux", + "database_os": "Linux", + "display_name": "ubiquity-workerman-raw", + "notes": "", + "versus": "workerman" + }, "ngx": { "json_url": "/Json_", "plaintext_url": "/Plaintext_", diff --git a/frameworks/PHP/ubiquity/composer.json b/frameworks/PHP/ubiquity/composer.json index 732be3ba42f..916eff0c310 100644 --- a/frameworks/PHP/ubiquity/composer.json +++ b/frameworks/PHP/ubiquity/composer.json @@ -5,7 +5,7 @@ }, "require-dev" : { "monolog/monolog" : "^1.24", - "mindplay/annotations" : "^1.3" + "phpmv/ubiquity-annotations" : "^0.0" }, "autoload" : { "psr-4" : { diff --git a/frameworks/PHP/ubiquity/config.toml b/frameworks/PHP/ubiquity/config.toml index d72ba2f9a37..823afacff4b 100644 --- a/frameworks/PHP/ubiquity/config.toml +++ b/frameworks/PHP/ubiquity/config.toml @@ -85,8 +85,6 @@ webserver = "nginx" versus = "ngx_php" [workerman] -urls.plaintext = "/Plaintext_" -urls.json = "/Json_" urls.db = "/Db_" urls.query = "/Db_/query/" urls.update = "/Db_/update/" @@ -132,6 +130,23 @@ platform = "workerman" webserver = "none" versus = "php" +[workerman-raw] +urls.plaintext = "/Plaintext_" +urls.json = "/Json_" +urls.db = 
"/DbRaw" +urls.query = "/DbRaw/query/" +urls.update = "/DbRaw/update/" +urls.fortune = "/FortunesRaw" +approach = "Realistic" +classification = "Fullstack" +database = "Postgres" +database_os = "Linux" +os = "Linux" +orm = "raw" +platform = "workerman" +webserver = "none" +versus = "php" + [swoole-mysql] urls.db = "/DbMy" urls.query = "/DbMy/query/" diff --git a/frameworks/PHP/ubiquity/deploy/conf/workerman/pgsql/raw/workerServices.php b/frameworks/PHP/ubiquity/deploy/conf/workerman/pgsql/raw/workerServices.php new file mode 100644 index 00000000000..534d97bd2da --- /dev/null +++ b/frameworks/PHP/ubiquity/deploy/conf/workerman/pgsql/raw/workerServices.php @@ -0,0 +1,15 @@ +onWorkerStart = function () use ($config) { + $db = \Ubiquity\db\Database::start('pgsql', $config); + \controllers\DbRaw::warmup($db); + \controllers\FortunesRaw::warmup($db); +}; diff --git a/frameworks/PHP/ubiquity/deploy/conf/workerman/pgsql/workerServices.php b/frameworks/PHP/ubiquity/deploy/conf/workerman/pgsql/workerServices.php index d542180cb02..0c0c62c5223 100644 --- a/frameworks/PHP/ubiquity/deploy/conf/workerman/pgsql/workerServices.php +++ b/frameworks/PHP/ubiquity/deploy/conf/workerman/pgsql/workerServices.php @@ -13,8 +13,6 @@ echo "End Loading\n"; \Ubiquity\cache\CacheManager::warmUpControllers([ - \controllers\Plaintext_::class, - \controllers\Json_::class, \controllers\Db_::class, \controllers\Fortunes_::class, \controllers\Cache::class @@ -25,4 +23,3 @@ \controllers\Db_::warmup(); \controllers\Fortunes_::warmup(); }; - diff --git a/frameworks/PHP/ubiquity/ubiquity-workerman-mongo.dockerfile b/frameworks/PHP/ubiquity/ubiquity-workerman-mongo.dockerfile index 61e94a3c2d0..52370c430d2 100644 --- a/frameworks/PHP/ubiquity/ubiquity-workerman-mongo.dockerfile +++ b/frameworks/PHP/ubiquity/ubiquity-workerman-mongo.dockerfile @@ -1,20 +1,19 @@ -FROM ubuntu:20.04 +FROM ubuntu:20.10 ARG DEBIAN_FRONTEND=noninteractive RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /dev/null RUN LC_ALL=C.UTF-8 add-apt-repository ppa:ondrej/php RUN apt-get update -yqq > /dev/null && \ - apt-get install -yqq php7.4 php7.4-common php7.4-cli php7.4-mongodb > /dev/null + apt-get install -yqq git php8.0-cli php8.0-mongodb php8.0-xml > /dev/null RUN apt-get install -yqq composer > /dev/null -RUN apt-get install -y php-pear php-dev libevent-dev > /dev/null +RUN apt-get install -y php-pear php8.0-dev libevent-dev > /dev/null +RUN pecl install event-3.0.2 > /dev/null && echo "extension=event.so" > /etc/php/8.0/cli/conf.d/event.ini -RUN printf "\n\n /usr/lib/x86_64-linux-gnu/\n\n\nno\n\n\n" | pecl install event > /dev/null && echo "extension=event.so" > /etc/php/7.4/cli/conf.d/event.ini - -COPY deploy/conf/php-async.ini /etc/php/7.4/cli/php.ini +COPY deploy/conf/php-async.ini /etc/php/8.0/cli/php.ini ADD ./ /ubiquity @@ -37,7 +36,8 @@ RUN chmod 777 -R /ubiquity/.ubiquity/* COPY deploy/conf/workerman/mongo/workerServices.php app/config/workerServices.php -RUN echo "opcache.preload=/ubiquity/app/config/preloader.script.php" >> /etc/php/7.4/cli/php.ini +RUN echo "opcache.preload=/ubiquity/app/config/preloader.script.php" >> /etc/php/8.0/cli/php.ini +RUN echo "opcache.jit_buffer_size=128M\nopcache.jit=tracing\n" >> /etc/php/8.0/cli/php.ini EXPOSE 8080 diff --git a/frameworks/PHP/ubiquity/ubiquity-workerman-mysql.dockerfile b/frameworks/PHP/ubiquity/ubiquity-workerman-mysql.dockerfile index d4543695e76..2ccdb298c86 100644 --- a/frameworks/PHP/ubiquity/ubiquity-workerman-mysql.dockerfile +++ 
b/frameworks/PHP/ubiquity/ubiquity-workerman-mysql.dockerfile @@ -1,19 +1,18 @@ - -FROM ubuntu:20.04 +FROM ubuntu:20.10 ARG DEBIAN_FRONTEND=noninteractive RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /dev/null RUN LC_ALL=C.UTF-8 add-apt-repository ppa:ondrej/php RUN apt-get update -yqq > /dev/null && \ - apt-get install -yqq php7.4 php7.4-common php7.4-cli php7.4-mysql > /dev/null + apt-get install -yqq git php8.0-cli php8.0-mysql php8.0-xml > /dev/null RUN apt-get install -yqq composer > /dev/null -RUN apt-get install -y php-pear php-dev libevent-dev > /dev/null -RUN printf "\n\n /usr/lib/x86_64-linux-gnu/\n\n\nno\n\n\n" | pecl install event > /dev/null && echo "extension=event.so" > /etc/php/7.4/cli/conf.d/event.ini +RUN apt-get install -y php-pear php8.0-dev libevent-dev > /dev/null +RUN pecl install event-3.0.2 > /dev/null && echo "extension=event.so" > /etc/php/8.0/cli/conf.d/event.ini -COPY deploy/conf/php-async.ini /etc/php/7.4/cli/php.ini +COPY deploy/conf/php-async.ini /etc/php/8.0/cli/php.ini ADD ./ /ubiquity WORKDIR /ubiquity @@ -35,7 +34,8 @@ RUN chmod 777 -R /ubiquity/.ubiquity/* COPY deploy/conf/workerman/mysql/workerServices.php app/config/workerServices.php -RUN echo "opcache.preload=/ubiquity/app/config/preloader.script.php" >> /etc/php/7.4/cli/php.ini +RUN echo "opcache.preload=/ubiquity/app/config/preloader.script.php" >> /etc/php/8.0/cli/php.ini +RUN echo "opcache.jit_buffer_size=128M\nopcache.jit=tracing\n" >> /etc/php/8.0/cli/php.ini EXPOSE 8080 diff --git a/frameworks/PHP/ubiquity/ubiquity-workerman-raw.dockerfile b/frameworks/PHP/ubiquity/ubiquity-workerman-raw.dockerfile new file mode 100644 index 00000000000..197218a7962 --- /dev/null +++ b/frameworks/PHP/ubiquity/ubiquity-workerman-raw.dockerfile @@ -0,0 +1,43 @@ + +FROM ubuntu:20.10 + +ARG DEBIAN_FRONTEND=noninteractive + +RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /dev/null +RUN LC_ALL=C.UTF-8 add-apt-repository ppa:ondrej/php +RUN apt-get update -yqq > /dev/null && \ + apt-get install -yqq git php8.0-cli php8.0-pgsql php8.0-xml > /dev/null + +RUN apt-get install -yqq composer > /dev/null + +RUN apt-get install -y php-pear php8.0-dev libevent-dev > /dev/null +RUN pecl install event-3.0.2 > /dev/null && echo "extension=event.so" > /etc/php/8.0/cli/conf.d/event.ini + +COPY deploy/conf/php-async.ini /etc/php/8.0/cli/php.ini + +ADD ./ /ubiquity +WORKDIR /ubiquity + +RUN chmod -R 777 /ubiquity + +RUN ["chmod", "+x", "deploy/run/install-composer.sh"] + +RUN deploy/run/install-composer.sh + +RUN apt-get update -yqq > /dev/null && \ + apt-get install -yqq git unzip > /dev/null + +RUN php composer.phar require phpmv/ubiquity-devtools:dev-master phpmv/ubiquity-workerman:dev-master --quiet + +RUN php composer.phar install --optimize-autoloader --classmap-authoritative --no-dev --quiet + +RUN chmod 777 -R /ubiquity/.ubiquity/* + +COPY deploy/conf/workerman/pgsql/raw/workerServices.php app/config/workerServices.php + +RUN echo "opcache.preload=/ubiquity/app/config/preloader.script.php\n" >> /etc/php/8.0/cli/php.ini +RUN echo "opcache.jit_buffer_size=128M\nopcache.jit=tracing\n" >> /etc/php/8.0/cli/php.ini + +EXPOSE 8080 + +CMD /ubiquity/vendor/bin/Ubiquity serve -t=workerman -p=8080 -h=0.0.0.0 diff --git a/frameworks/PHP/ubiquity/ubiquity-workerman.dockerfile b/frameworks/PHP/ubiquity/ubiquity-workerman.dockerfile index fec9c0867ca..deedf6d9727 100644 --- a/frameworks/PHP/ubiquity/ubiquity-workerman.dockerfile +++ 
b/frameworks/PHP/ubiquity/ubiquity-workerman.dockerfile @@ -1,19 +1,19 @@ -FROM ubuntu:20.04 +FROM ubuntu:20.10 ARG DEBIAN_FRONTEND=noninteractive RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /dev/null RUN LC_ALL=C.UTF-8 add-apt-repository ppa:ondrej/php RUN apt-get update -yqq > /dev/null && \ - apt-get install -yqq php7.4 php7.4-common php7.4-cli php7.4-pgsql > /dev/null + apt-get install -yqq git php8.0-cli php8.0-pgsql php8.0-xml > /dev/null RUN apt-get install -yqq composer > /dev/null -RUN apt-get install -y php-pear php-dev libevent-dev > /dev/null -RUN printf "\n\n /usr/lib/x86_64-linux-gnu/\n\n\nno\n\n\n" | pecl install event > /dev/null && echo "extension=event.so" > /etc/php/7.4/cli/conf.d/event.ini +RUN apt-get install -y php-pear php8.0-dev libevent-dev > /dev/null +RUN pecl install event-3.0.2 > /dev/null && echo "extension=event.so" > /etc/php/8.0/cli/conf.d/event.ini -COPY deploy/conf/php-async.ini /etc/php/7.4/cli/php.ini +COPY deploy/conf/php-async.ini /etc/php/8.0/cli/php.ini ADD ./ /ubiquity WORKDIR /ubiquity @@ -35,7 +35,8 @@ RUN chmod 777 -R /ubiquity/.ubiquity/* COPY deploy/conf/workerman/pgsql/workerServices.php app/config/workerServices.php -RUN echo "opcache.preload=/ubiquity/app/config/preloader.script.php" >> /etc/php/7.4/cli/php.ini +RUN echo "opcache.preload=/ubiquity/app/config/preloader.script.php\n" >> /etc/php/8.0/cli/php.ini +RUN echo "opcache.jit_buffer_size=128M\nopcache.jit=function\n" >> /etc/php/8.0/cli/php.ini EXPOSE 8080 diff --git a/frameworks/Python/aiohttp/README.md b/frameworks/Python/aiohttp/README.md index df0b2050176..280e739f8a0 100644 --- a/frameworks/Python/aiohttp/README.md +++ b/frameworks/Python/aiohttp/README.md @@ -12,7 +12,7 @@ All test implementations are located within ([./app](app)). ## Description -aiohttp with [aiopg + sqlalchemy](http://aiopg.readthedocs.io/en/stable/sa.html) and +aiohttp with [sqlalchemy](https://docs.sqlalchemy.org/en/14/orm/extensions/asyncio.html) and separately [asyncpg](https://magicstack.github.io/asyncpg/current/) for database access. [uvloop](https://github.com/MagicStack/uvloop) is used for a more performant event loop. @@ -22,7 +22,7 @@ separately [asyncpg](https://magicstack.github.io/asyncpg/current/) for database PostgreSQL. Two variants: -* ORM using [aiopg + sqlalchemy](http://aiopg.readthedocs.io/en/stable/sa.html) +* ORM using [sqlalchemy](https://docs.sqlalchemy.org/en/14/orm/extensions/asyncio.html) * RAW using [asyncpg](https://magicstack.github.io/asyncpg/current/) **To enabled "RAW" mode set the following environment variable:** diff --git a/frameworks/Python/aiohttp/app/main.py b/frameworks/Python/aiohttp/app/main.py index 5bca67bbc36..1bcb6f06615 100644 --- a/frameworks/Python/aiohttp/app/main.py +++ b/frameworks/Python/aiohttp/app/main.py @@ -1,13 +1,11 @@ import os import multiprocessing -from pathlib import Path -import aiohttp_jinja2 -import aiopg.sa import asyncpg -import jinja2 from aiohttp import web from sqlalchemy.engine.url import URL +from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine +from sqlalchemy.orm import sessionmaker from .views import ( json, @@ -25,20 +23,18 @@ CONNECTION_ORM = os.getenv('CONNECTION', 'ORM').upper() == 'ORM' -THIS_DIR = Path(__file__).parent - def pg_dsn() -> str: """ :return: DSN url suitable for sqlalchemy and aiopg. 
""" - return str(URL( + return str(URL.create( database='hello_world', password=os.getenv('PGPASS', 'benchmarkdbpass'), host='tfb-database', port='5432', username=os.getenv('PGUSER', 'benchmarkdbuser'), - drivername='postgres', + drivername='postgresql', )) @@ -52,16 +48,14 @@ async def db_ctx(app: web.Application): min_size = max(int(max_size / 2), 1) print(f'connection pool: min size: {min_size}, max size: {max_size}, orm: {CONNECTION_ORM}') if CONNECTION_ORM: - app['pg'] = await aiopg.sa.create_engine(dsn=dsn, minsize=min_size, maxsize=max_size, loop=app.loop) + engine = create_async_engine(dsn, future=True, pool_size=max_size) + app['db_session'] = sessionmaker(engine, class_=AsyncSession) else: app['pg'] = await asyncpg.create_pool(dsn=dsn, min_size=min_size, max_size=max_size, loop=app.loop) yield - if CONNECTION_ORM: - app['pg'].close() - await app['pg'].wait_closed() - else: + if not CONNECTION_ORM: await app['pg'].close() @@ -82,11 +76,6 @@ def setup_routes(app): def create_app(): app = web.Application() - - jinja2_loader = jinja2.FileSystemLoader(str(THIS_DIR / 'templates')) - aiohttp_jinja2.setup(app, loader=jinja2_loader) - app.cleanup_ctx.append(db_ctx) - setup_routes(app) return app diff --git a/frameworks/Python/aiohttp/app/views.py b/frameworks/Python/aiohttp/app/views.py index 488851bb672..1533a1d94fe 100644 --- a/frameworks/Python/aiohttp/app/views.py +++ b/frameworks/Python/aiohttp/app/views.py @@ -1,16 +1,18 @@ from functools import partial from operator import attrgetter, itemgetter +from pathlib import Path from random import randint -from aiohttp_jinja2 import template -from aiohttp.web import Response, json_response +import jinja2 import ujson - +from aiohttp.web import Response, json_response from sqlalchemy import select -from .models import sa_fortunes, sa_worlds, Fortune +from .models import sa_fortunes, sa_worlds, Fortune, World json_response = partial(json_response, dumps=ujson.dumps) +template_path = Path(__file__).parent / 'templates' / 'fortune.jinja' +template = jinja2.Template(template_path.read_text()) def get_num_queries(request): @@ -37,10 +39,11 @@ async def single_database_query_orm(request): Test 2 ORM """ id_ = randint(1, 10000) - async with request.app['pg'].acquire() as conn: - cur = await conn.execute(select([sa_worlds.c.randomnumber]).where(sa_worlds.c.id == id_)) - r = await cur.first() - return json_response({'id': id_, 'randomNumber': r[0]}) + async with request.app['db_session']() as sess: + # TODO(SA1.4.0b2): sess.scalar() + ret = await sess.execute(select(World.randomnumber).filter_by(id=id_)) + num = ret.scalar() + return json_response({'id': id_, 'randomNumber': num}) async def single_database_query_raw(request): @@ -64,11 +67,12 @@ async def multiple_database_queries_orm(request): ids.sort() result = [] - async with request.app['pg'].acquire() as conn: + async with request.app['db_session']() as sess: for id_ in ids: - cur = await conn.execute(select([sa_worlds.c.randomnumber]).where(sa_worlds.c.id == id_)) - r = await cur.first() - result.append({'id': id_, 'randomNumber': r[0]}) + # TODO(SA1.4.0b2): sess.scalar() + ret = await sess.execute(select(World.randomnumber).filter_by(id=id_)) + num = ret.scalar() + result.append({'id': id_, 'randomNumber': num}) return json_response(result) @@ -92,20 +96,19 @@ async def multiple_database_queries_raw(request): return json_response(result) -@template('fortune.jinja') async def fortunes(request): """ Test 4 ORM """ - async with request.app['pg'].acquire() as conn: - cur = await 
conn.execute(select([sa_fortunes.c.id, sa_fortunes.c.message]))
-        fortunes = list(await cur.fetchall())
+    async with request.app['db_session']() as sess:
+        ret = await sess.execute(select(Fortune.id, Fortune.message))
+        fortunes = ret.all()
     fortunes.append(Fortune(id=0, message='Additional fortune added at request time.'))
     fortunes.sort(key=attrgetter('message'))
-    return {'fortunes': fortunes}
+    content = template.render(fortunes=fortunes)
+    return Response(text=content, content_type='text/html')


-@template('fortune.jinja')
 async def fortunes_raw(request):
     """
     Test 4 RAW
@@ -114,7 +117,8 @@
         fortunes = await conn.fetch('SELECT * FROM Fortune')
     fortunes.append(dict(id=0, message='Additional fortune added at request time.'))
     fortunes.sort(key=itemgetter('message'))
-    return {'fortunes': fortunes}
+    content = template.render(fortunes=fortunes)
+    return Response(text=content, content_type='text/html')


 async def updates(request):
@@ -127,21 +131,17 @@
     ids = [randint(1, 10000) for _ in range(num_queries)]
     ids.sort()

-    async with request.app['pg'].acquire() as conn:
-        for id_ in ids:
-            cur = await conn.execute(
-                select([sa_worlds.c.randomnumber])
-                .where(sa_worlds.c.id == id_)
-            )
-            # the result of this is a dict with the previous random number `randomnumber` which we don't actually use
-            await cur.first()
-            rand_new = randint(1, 10000)
-            await conn.execute(
-                sa_worlds.update()
-                .where(sa_worlds.c.id == id_)
-                .values(randomnumber=rand_new)
-            )
-            result.append({'id': id_, 'randomNumber': rand_new})
+    # TODO(SA1.4.0b2): async with request.app['db_session'].begin() as sess:
+    async with request.app['db_session']() as sess:
+        async with sess.begin():
+            for id_ in ids:
+                rand_new = randint(1, 10000)
+                # TODO(SA1.4.0b2): world = await sess.get(World, id_)
+                ret = await sess.execute(select(World).filter_by(id=id_))
+                world = ret.scalar()
+                world.randomnumber = rand_new
+
+                result.append({'id': id_, 'randomNumber': rand_new})

     return json_response(result)

 async def updates_raw(request):
diff --git a/frameworks/Python/aiohttp/requirements.txt b/frameworks/Python/aiohttp/requirements.txt
index 54ffcebd55a..259db15bf47 100644
--- a/frameworks/Python/aiohttp/requirements.txt
+++ b/frameworks/Python/aiohttp/requirements.txt
@@ -1,10 +1,9 @@
 aiohttp==3.7.3
-aiohttp-jinja2==1.4.2
-aiopg==1.0.0
 asyncpg==0.21.0
 cchardet==2.1.7
 gunicorn==20.0.4
+jinja2==2.11.2
 psycopg2==2.8.6
-SQLAlchemy==1.3.16
+SQLAlchemy==1.4.0b1
 ujson==2.0.3
 uvloop==0.14.0
diff --git a/frameworks/Python/django/django-postgresql.dockerfile b/frameworks/Python/django/django-postgresql.dockerfile
index 16a0c68792e..dc161649ac1 100644
--- a/frameworks/Python/django/django-postgresql.dockerfile
+++ b/frameworks/Python/django/django-postgresql.dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.7-stretch
+FROM python:3.9.1-buster

 ADD ./ /django

@@ -8,4 +8,4 @@ RUN pip install -r /django/requirements.txt

 EXPOSE 8080

-CMD gunicorn --pid=gunicorn.pid hello.wsgi:application -c gunicorn_conf.py --env DJANGO_DB=postgresql_psycopg2
+CMD gunicorn --pid=gunicorn.pid hello.wsgi:application -c gunicorn_conf.py --env DJANGO_DB=postgresql
diff --git a/frameworks/Python/django/django.dockerfile b/frameworks/Python/django/django.dockerfile
index 578b321f293..207515924da 100644
--- a/frameworks/Python/django/django.dockerfile
+++ b/frameworks/Python/django/django.dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.7-stretch
+FROM python:3.9.1-buster

 ADD ./ /django

diff --git a/frameworks/Python/django/gunicorn_conf.py
b/frameworks/Python/django/gunicorn_conf.py index c587c4da8a7..fb41f03e23f 100644 --- a/frameworks/Python/django/gunicorn_conf.py +++ b/frameworks/Python/django/gunicorn_conf.py @@ -24,4 +24,3 @@ def post_fork(server, worker): # Disalbe access log import meinheld.server meinheld.server.set_access_logger(None) - diff --git a/frameworks/Python/django/hello/world/views.py b/frameworks/Python/django/hello/world/views.py index 475e4f44e76..5dd6a35f35f 100644 --- a/frameworks/Python/django/hello/world/views.py +++ b/frameworks/Python/django/hello/world/views.py @@ -38,8 +38,8 @@ def json(request): def db(request): r = _random_int() world = uj_dumps({ - 'id' : r, - 'randomNumber' : World.objects.get(id=r).randomnumber + 'id': r, + 'randomNumber': World.objects.get(id=r).randomnumber }) return HttpResponse(world, content_type="application/json") @@ -49,7 +49,7 @@ def dbs(request): def caller(input_): int_ = _random_int() - return {'id' : int_, 'randomNumber' : World.objects.get(id=int_).randomnumber} + return {'id': int_, 'randomNumber': World.objects.get(id=int_).randomnumber} worlds = tuple(map(caller, range(queries))) return HttpResponse(uj_dumps(worlds), content_type="application/json") @@ -67,10 +67,10 @@ def update(request): queries = _get_queries(request) def caller(input_): - w = World.objects.get(id= _random_int()) + w = World.objects.get(id=_random_int()) w.randomnumber = _random_int() w.save() - return {'id' : w.id, 'randomNumber' : w.randomnumber} + return {'id': w.id, 'randomNumber': w.randomnumber} worlds = tuple(map(caller, range(queries))) return HttpResponse(uj_dumps(worlds), content_type="application/json") diff --git a/frameworks/Python/django/requirements.txt b/frameworks/Python/django/requirements.txt index 89de5523303..c627e72faca 100644 --- a/frameworks/Python/django/requirements.txt +++ b/frameworks/Python/django/requirements.txt @@ -1,8 +1,8 @@ -Django==3.0.5 -greenlet==0.4.15 +Django==3.1.5 +greenlet==0.4.17 gunicorn==20.0.4 -meinheld==1.0.1 +meinheld==1.0.2 mysqlclient==1.4.6 -psycopg2==2.8.5 -pytz==2019.3 -ujson==2.0.3 +psycopg2==2.8.6 +pytz==2020.4 +ujson==4.0.1 \ No newline at end of file diff --git a/frameworks/Rust/ntex/Cargo.toml b/frameworks/Rust/ntex/Cargo.toml index 24729d46e27..fac22df8593 100755 --- a/frameworks/Rust/ntex/Cargo.toml +++ b/frameworks/Rust/ntex/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "ntex" -version = "0.1.0" +version = "0.2.0" edition = "2018" [[bin]] @@ -20,19 +20,19 @@ name = "ntex-sailfish" path = "src/main_sailfish.rs" [dependencies] -ntex = "0.1.21" +ntex = "0.2.0-b.5" mimalloc = { version = "0.1.21", default-features = false } yarte = { version = "0.14", features = ["bytes-buf", "json"] } -env_logger = "0.7" +env_logger = "0.8" random-fast-rng = "0.1.1" bytes = "0.5.6" atoi = "0.3.2" -num_cpus = "1.0" -futures = "0.3.4" +num_cpus = "1.13" +futures = "0.3.12" http = "0.2" -smallvec = "1.4.0" -simd-json = "0.3.14" -simd-json-derive = "0.1.9" +smallvec = "1.6.1" +simd-json = "0.3.23" +simd-json-derive = "0.1.15" serde = { version = "1.0", features = ["derive"] } log = { version = "0.4", features = ["release_max_level_off"] } tokio = "=0.2.6" diff --git a/frameworks/Rust/ntex/ntex-db.dockerfile b/frameworks/Rust/ntex/ntex-db.dockerfile index c3a6392f58f..aba80938b28 100644 --- a/frameworks/Rust/ntex/ntex-db.dockerfile +++ b/frameworks/Rust/ntex/ntex-db.dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.46 +FROM rust:1.49 # Disable simd at jsonescape ENV CARGO_CFG_JSONESCAPE_DISABLE_AUTO_SIMD= diff --git a/frameworks/Rust/ntex/ntex-raw.dockerfile 
diff --git a/frameworks/Rust/ntex/ntex-raw.dockerfile b/frameworks/Rust/ntex/ntex-raw.dockerfile
index d440c242290..4fe94a1c75f 100644
--- a/frameworks/Rust/ntex/ntex-raw.dockerfile
+++ b/frameworks/Rust/ntex/ntex-raw.dockerfile
@@ -1,4 +1,4 @@
-FROM rust:1.46
+FROM rust:1.49
 
 # Disable simd at jsonescape
 ENV CARGO_CFG_JSONESCAPE_DISABLE_AUTO_SIMD=
diff --git a/frameworks/Rust/ntex/ntex-sailfish.dockerfile b/frameworks/Rust/ntex/ntex-sailfish.dockerfile
index f54eba12ae9..099c4ac0a10 100644
--- a/frameworks/Rust/ntex/ntex-sailfish.dockerfile
+++ b/frameworks/Rust/ntex/ntex-sailfish.dockerfile
@@ -1,4 +1,4 @@
-FROM rust:1.46
+FROM rust:1.49
 
 RUN apt-get update -yqq && apt-get install -yqq cmake g++
 
diff --git a/frameworks/Rust/ntex/ntex.dockerfile b/frameworks/Rust/ntex/ntex.dockerfile
index 4ce627beaf3..454f0b59401 100644
--- a/frameworks/Rust/ntex/ntex.dockerfile
+++ b/frameworks/Rust/ntex/ntex.dockerfile
@@ -1,4 +1,4 @@
-FROM rust:1.46
+FROM rust:1.49
 
 # Disable simd at jsonescape
 ENV CARGO_CFG_JSONESCAPE_DISABLE_AUTO_SIMD=
diff --git a/frameworks/Rust/ntex/src/main_db.rs b/frameworks/Rust/ntex/src/main_db.rs
index 435ee7367cc..5b3d011889a 100644
--- a/frameworks/Rust/ntex/src/main_db.rs
+++ b/frameworks/Rust/ntex/src/main_db.rs
@@ -38,8 +38,7 @@ impl Service for App {
     }
 
     fn call(&self, req: Request) -> Self::Future {
-        let path = req.path();
-        match path {
+        match req.path() {
             "/db" => {
                 let h_srv = self.hdr_srv.clone();
                 let h_ct = self.hdr_ctjson.clone();
diff --git a/frameworks/Rust/ntex/src/main_raw.rs b/frameworks/Rust/ntex/src/main_raw.rs
index 4c809a72bf7..31ad41a2010 100644
--- a/frameworks/Rust/ntex/src/main_raw.rs
+++ b/frameworks/Rust/ntex/src/main_raw.rs
@@ -1,15 +1,13 @@
 #[global_allocator]
 static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
 
-use std::future::Future;
-use std::io;
-use std::pin::Pin;
-use std::task::{Context, Poll};
+use std::{
+    cell::RefCell, future::Future, io, pin::Pin, rc::Rc, task::Context, task::Poll,
+};
 
-use bytes::BytesMut;
-use ntex::codec::{AsyncRead, AsyncWrite, Decoder};
 use ntex::fn_service;
-use ntex::http::{h1, Request};
+use ntex::framed::{ReadTask, State, WriteTask};
+use ntex::http::h1;
 use ntex::rt::net::TcpStream;
 use yarte::Serialize;
 
@@ -27,100 +25,61 @@ pub struct Message {
 }
 
 struct App {
-    io: TcpStream,
-    read_buf: BytesMut,
-    write_buf: BytesMut,
-    write_pos: usize,
+    state: State,
     codec: h1::Codec,
 }
 
-impl App {
-    fn handle_request(&mut self, req: Request) {
-        match req.path() {
-            "/json" => {
-                self.write_buf.extend_from_slice(JSON);
-                self.codec.set_date_header(&mut self.write_buf);
-                Message {
-                    message: "Hello, World!",
-                }
-                .to_bytes_mut(&mut self.write_buf);
-            }
-            "/plaintext" => {
-                self.write_buf.extend_from_slice(PLAIN);
-                self.codec.set_date_header(&mut self.write_buf);
-                self.write_buf.extend_from_slice(BODY);
-            }
-            _ => {
-                self.write_buf.extend_from_slice(HTTPNFOUND);
-                self.write_buf.extend_from_slice(HDR_SERVER);
-            }
-        }
-    }
-}
-
 impl Future for App {
     type Output = Result<(), ()>;
 
     fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
         let this = self.as_mut().get_mut();
-
-        if !this.write_buf.is_empty() {
-            let len = this.write_buf.len();
-            let mut written = this.write_pos;
-            while written < len {
-                match Pin::new(&mut this.io).poll_write(cx, &this.write_buf[written..]) {
-                    Poll::Pending => {
-                        break;
-                    }
-                    Poll::Ready(Ok(n)) => {
-                        if n == 0 {
-                            return Poll::Ready(Ok(()));
-                        } else {
-                            written += n;
-                        }
-                    }
-                    Poll::Ready(Err(_)) => return Poll::Ready(Err(())),
-                }
-            }
-            if written == len {
-                this.write_pos = 0;
-                unsafe { this.write_buf.set_len(0) }
-            } else if written > 0 {
-                this.write_pos = written;
-                return Poll::Pending;
-            }
-        }
-
-        if this.read_buf.capacity() - this.read_buf.len() < 4096 {
-            this.read_buf.reserve(32_768);
+        if !this.state.is_open() {
+            this.state.close();
+            return Poll::Ready(Ok(()));
         }
 
+        let mut updated = false;
         loop {
-            let read = Pin::new(&mut this.io).poll_read_buf(cx, &mut this.read_buf);
-            match read {
-                Poll::Pending => break,
-                Poll::Ready(Ok(n)) => {
-                    if n == 0 {
-                        return Poll::Ready(Ok(()));
+            match this.state.decode_item(&this.codec) {
+                Ok(Some((req, _))) => {
+                    match req.path() {
+                        "/json" => this.state.with_write_buf(|buf| {
+                            buf.extend_from_slice(JSON);
+                            this.codec.set_date_header(buf);
+                            Message {
+                                message: "Hello, World!",
+                            }
+                            .to_bytes_mut(buf);
+                        }),
+                        "/plaintext" => this.state.with_write_buf(|buf| {
+                            buf.extend_from_slice(PLAIN);
+                            this.codec.set_date_header(buf);
+                            buf.extend_from_slice(BODY);
+                        }),
+                        _ => this.state.with_write_buf(|buf| {
+                            buf.extend_from_slice(HTTPNFOUND);
+                            buf.extend_from_slice(HDR_SERVER);
+                        }),
                     }
+                    updated = true;
                 }
-                Poll::Ready(Err(_)) => return Poll::Ready(Err(())),
-            }
-        }
-
-        loop {
-            match this.codec.decode(&mut this.read_buf) {
-                Ok(Some(h1::Message::Item(req))) => this.handle_request(req),
                 Ok(None) => break,
-                _ => return Poll::Ready(Err(())),
+                _ => {
+                    this.state.close();
+                    return Poll::Ready(Err(()));
+                }
             }
         }
-
-        if !this.write_buf.is_empty() {
-            self.poll(cx)
-        } else {
-            Poll::Pending
+        if updated {
+            this.state.dsp_flush_write_data(cx.waker());
+        }
+        if !this.state.is_read_ready() {
+            this.state.dsp_read_more_data(cx.waker());
+        } else if !updated {
+            this.state.dsp_register_task(cx.waker());
         }
+        Poll::Pending
     }
 }
@@ -132,12 +91,16 @@ async fn main() -> io::Result<()> {
     ntex::server::build()
         .backlog(1024)
         .bind("techempower", "0.0.0.0:8080", || {
-            fn_service(|io: TcpStream| App {
-                io,
-                read_buf: BytesMut::with_capacity(32_768),
-                write_buf: BytesMut::with_capacity(32_768),
-                write_pos: 0,
-                codec: h1::Codec::default(),
+            fn_service(|io: TcpStream| {
+                let state = State::new();
+                let io = Rc::new(RefCell::new(io));
+                ntex::rt::spawn(ReadTask::new(io.clone(), state.clone()));
+                ntex::rt::spawn(WriteTask::new(io, state.clone()));
+
+                App {
+                    state,
+                    codec: h1::Codec::default(),
+                }
             })
         })?
         .start()
diff --git a/frameworks/Rust/warp-rust/Cargo.toml b/frameworks/Rust/warp-rust/Cargo.toml
index facbe6ef502..e9404a24ae4 100644
--- a/frameworks/Rust/warp-rust/Cargo.toml
+++ b/frameworks/Rust/warp-rust/Cargo.toml
@@ -5,10 +5,15 @@ authors = ["Konrad Borowski "]
 edition = "2018"
 
 [dependencies]
-futures = "0.3.1"
-rand = { version = "0.7.3", features = ["small_rng"] }
-serde = { version = "1.0.103", features = ["derive"] }
-tokio = { version = "0.2.21", features = ["macros", "rt-threaded"] }
-tokio-postgres = "0.5.1"
-warp = "0.2.3"
-yarte = "0.12.2"
+futures = "0.3.12"
+rand = { version = "0.8.2", features = ["small_rng"] }
+serde = { version = "1.0.120", features = ["derive"] }
+tokio = { version = "1.0.2", features = ["macros", "rt-multi-thread"] }
+tokio-postgres = "0.7.0"
+warp = "0.3.0"
+yarte = "0.14.1"
+
+[profile.release]
+codegen-units = 1
+opt-level = 3
+lto = true
diff --git a/frameworks/Rust/warp-rust/warp-rust.dockerfile b/frameworks/Rust/warp-rust/warp-rust.dockerfile
index f6e63c8d663..bb36f18899f 100644
--- a/frameworks/Rust/warp-rust/warp-rust.dockerfile
+++ b/frameworks/Rust/warp-rust/warp-rust.dockerfile
@@ -1,4 +1,4 @@
-FROM rust:1.44
+FROM rust:1.49
 
 WORKDIR /warp-rust
 COPY src src
diff --git a/frameworks/Scala/finagle/build.sbt b/frameworks/Scala/finagle/build.sbt
index 8e1f268236d..b281a7dda05 100644
--- a/frameworks/Scala/finagle/build.sbt
+++ b/frameworks/Scala/finagle/build.sbt
@@ -1,4 +1,4 @@
-lazy val finagleVersion = "20.12.0"
+lazy val finagleVersion = "21.1.0"
 
 name := "finagle-benchmark"
 scalaVersion := "2.12.8"
diff --git a/frameworks/Scala/finatra/build.sbt b/frameworks/Scala/finatra/build.sbt
index bb28b48ddbc..a0c401d2cd3 100644
--- a/frameworks/Scala/finatra/build.sbt
+++ b/frameworks/Scala/finatra/build.sbt
@@ -1,4 +1,4 @@
-lazy val finatraVersion = "20.12.0"
+lazy val finatraVersion = "21.1.0"
 
 name := "techempower-benchmarks-finatra"
 organization := "com.twitter"
diff --git a/toolset/github_actions/github_actions_diff.py b/toolset/github_actions/github_actions_diff.py
index d3f9860402e..db8f5ba702c 100755
--- a/toolset/github_actions/github_actions_diff.py
+++ b/toolset/github_actions/github_actions_diff.py
@@ -58,24 +58,24 @@ def quit_diffing():
 curr_branch = ""
 is_PR = (os.getenv("PR_NUMBER") != "")
 
-# BRANCH_NAME is the the name of the branch
-is_master = os.getenv("BRANCH_NAME") == "master"
+previous_commit = os.getenv("PREVIOUS_COMMIT")
+
+diff_target = os.getenv("TARGET_BRANCH_NAME") if is_PR else previous_commit
 
 if is_PR:
     curr_branch = "HEAD"
-elif not is_master:
-    curr_branch = os.getenv("GITHUB_SHA")
-
-if not is_master:
     # Also fetch master to compare against
-    subprocess.check_output(['bash', '-c', 'git fetch origin master:master'])
+    subprocess.check_output(['bash', '-c', 'git fetch origin {0}:{0}'
+                             .format(diff_target)])
+else:
+    curr_branch = os.getenv("GITHUB_SHA")
 
 # https://stackoverflow.com/questions/25071579/list-all-files-changed-in-a-pull-request-in-git-github
 changes = clean_output(
     subprocess.check_output([
         'bash', '-c',
-        'git --no-pager diff --name-only {0} $(git merge-base {0} master)'
-        .format(curr_branch)
+        'git --no-pager diff --name-only {0} $(git merge-base {0} {1})'
+        .format(curr_branch, diff_target)
     ]))
 print("Determining what to run based on the following file changes: \n{!s}"
       .format('\n'.join(changes.split('\n')[0:10])))
@@ -98,9 +98,19 @@ def quit_diffing():
         filter(lambda x: os.path.isdir(dir + x), os.listdir(dir)))
 elif os.getenv("TESTDIR"):
     test_dirs = os.getenv("TESTDIR").split(' ')
+else:
+    def get_frameworks(test_lang):
+        dir = "frameworks/" + test_lang + "/"
+        return map(lambda x: test_lang + "/" + x,
+                   filter(lambda x: os.path.isdir(dir + x),
+                          os.listdir(dir)))
+    test_dirs = []
+    for frameworks in map(get_frameworks, os.listdir("frameworks")):
+        for framework in frameworks:
+            test_dirs.append(framework)
 
 # Forced full run
-if (not is_PR and is_master) or re.search(r'\[ci run-all\]', last_commit_msg, re.M):
+if re.search(r'\[ci run-all\]', last_commit_msg, re.M):
     print("All tests have been forced to run from the commit message.")
     run_tests = test_dirs
     quit_diffing()
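
Taken together, the github_actions_diff.py changes stop special-casing master: a pull request is now diffed against its target branch and any other build against the commit recorded in PREVIOUS_COMMIT, in both cases through `git merge-base`. A condensed sketch of the resulting selection logic follows; it is a standalone rewrite for illustration only, not the script itself, and it assumes the same PR_NUMBER, TARGET_BRANCH_NAME, PREVIOUS_COMMIT, and GITHUB_SHA environment variables that the workflow exports.

    # Condensed illustration of the new diff-target selection; the real script
    # reads these variables from the GitHub Actions environment shown above.
    import os
    import subprocess


    def changed_files():
        is_pr = os.getenv("PR_NUMBER") != ""
        target = os.getenv("TARGET_BRANCH_NAME") if is_pr else os.getenv("PREVIOUS_COMMIT")
        if is_pr:
            curr = "HEAD"
            # Make sure the PR's target branch exists locally before diffing against it.
            subprocess.check_output(['git', 'fetch', 'origin', '{0}:{0}'.format(target)])
        else:
            curr = os.getenv("GITHUB_SHA")
        base = subprocess.check_output(['git', 'merge-base', curr, target]).decode().strip()
        out = subprocess.check_output(['git', '--no-pager', 'diff', '--name-only', curr, base])
        return out.decode().splitlines()
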