diff --git a/.gitattributes b/.gitattributes old mode 100755 new mode 100644 diff --git a/.mailmap b/.mailmap old mode 100755 new mode 100644 diff --git a/Dockerfile b/Dockerfile old mode 100755 new mode 100644 diff --git a/frameworks/C++/ffead-cpp/benchmark_config.json b/frameworks/C++/ffead-cpp/benchmark_config.json index f8e38bf8b7d..5d25a82c35e 100644 --- a/frameworks/C++/ffead-cpp/benchmark_config.json +++ b/frameworks/C++/ffead-cpp/benchmark_config.json @@ -60,7 +60,7 @@ "display_name": "ffead-cpp-crystal-http", "notes": "", "versus": "", - "tags": [] + "tags": ["broken"] }, "d-hunt": { "json_url": "/te-benchmark-um/json", @@ -98,7 +98,7 @@ "display_name": "ffead-cpp-go-gnet", "notes": "", "versus": "", - "tags": [] + "tags": ["broken"] }, "rust-actix": { "json_url": "/te-benchmark-um/json", @@ -143,6 +143,54 @@ "versus": "", "tags": [] }, + "v-picov-raw-profiled": { + "json_url": "/te-benchmark-um-pq/json", + "plaintext_url": "/plaintext", + "db_url": "/te-benchmark-um-pq/db", + "query_url": "/te-benchmark-um-pq/queries?queries=", + "fortune_url": "/te-benchmark-um-pq/fortunes", + "update_url": "/te-benchmark-um-pq/updates?queries=", + "cached_query_url": "/te-benchmark-um-pq/cached-worlds?count=", + "port": 8080, + "approach": "Realistic", + "classification": "Fullstack", + "database": "postgres", + "framework": "ffead-cpp", + "language": "C++", + "orm": "Raw", + "platform": "None", + "webserver": "picov", + "os": "Linux", + "database_os": "Linux", + "display_name": "ffead-cpp-v-picov-raw-profiled", + "notes": "", + "versus": "", + "tags": [] + }, + "v-picov-raw-clibpqb-profiled": { + "json_url": "/te-benchmark-um-pq/json", + "plaintext_url": "/plaintext", + "db_url": "/te-benchmark-um-pq/db", + "query_url": "/te-benchmark-um-pq/queries?queries=", + "fortune_url": "/te-benchmark-um-pq/fortunes", + "update_url": "/te-benchmark-um-pq/updates?queries=", + "cached_query_url": "/te-benchmark-um-pq/cached-worlds?count=", + "port": 8080, + "approach": "Realistic", + "classification": "Fullstack", + "database": "postgres", + "framework": "ffead-cpp", + "language": "C++", + "orm": "Raw", + "platform": "None", + "webserver": "picov", + "os": "Linux", + "database_os": "Linux", + "display_name": "ffead-cpp-v-picov-raw-clibpqb-profiled", + "notes": "", + "versus": "", + "tags": [] + }, "java-firenio": { "json_url": "/te-benchmark-um/json", "plaintext_url": "/plaintext", @@ -183,7 +231,7 @@ "display_name": "ffead-cpp-postgresql", "notes": "", "versus": "", - "tags": [] + "tags": ["broken"] }, "postgresql-raw": { "json_url": "/te-benchmark-um-pq/json", @@ -323,7 +371,7 @@ "port": 8080, "approach": "Realistic", "classification": "Fullstack", - "database": "mongodb", + "database": "None", "framework": "ffead-cpp", "language": "C++", "orm": "Full", @@ -334,7 +382,26 @@ "display_name": "ffead-cpp-nginx", "notes": "", "versus": "", - "tags": [] + "tags": ["broken"] + }, + "seastar": { + "json_url": "/te-benchmark-um/json", + "plaintext_url": "/plaintext", + "port": 8080, + "approach": "Realistic", + "classification": "Fullstack", + "database": "None", + "framework": "ffead-cpp", + "language": "C++", + "orm": "None", + "platform": "None", + "webserver": "seastar-http", + "os": "Linux", + "database_os": "Linux", + "display_name": "ffead-cpp-seastar", + "notes": "", + "versus": "", + "tags": ["broken"] }, "apache": { "json_url": "/te-benchmark-um/json", @@ -514,7 +581,7 @@ "display_name": "ffead-cpp-go-fasthttp", "notes": "", "versus": "", - "tags": ["broken"] + "tags": [] }, "julia-http": { "json_url": 
"/te-benchmark-um/json", @@ -670,4 +737,3 @@ } }] } - diff --git a/frameworks/C++/ffead-cpp/config.toml b/frameworks/C++/ffead-cpp/config.toml index 38127091430..4a5fe884b2e 100644 --- a/frameworks/C++/ffead-cpp/config.toml +++ b/frameworks/C++/ffead-cpp/config.toml @@ -1,20 +1,24 @@ [framework] name = "ffead-cpp" -[java-wizzardo-http] +[main] urls.plaintext = "/plaintext" urls.json = "/te-benchmark-um/json" +urls.db = "/te-benchmark-um/db" +urls.query = "/te-benchmark-um/queries?queries=" +urls.update = "/te-benchmark-um/updates?queries=" +urls.fortune = "/te-benchmark-um/fortunes" approach = "Realistic" classification = "Fullstack" -database = "None" +database = "mongodb" database_os = "Linux" os = "Linux" -orm = "None" +orm = "Full" platform = "None" -webserver = "wizzardo-http" +webserver = "ffead-cpp" versus = "" -[go-fasthttp] +[libreactor] urls.plaintext = "/plaintext" urls.json = "/te-benchmark-um/json" approach = "Realistic" @@ -24,25 +28,10 @@ database_os = "Linux" os = "Linux" orm = "None" platform = "None" -webserver = "fasthttp" -versus = "" - -[postgresql-raw-async-clibpqb-profiled] -urls.db = "/te-benchmark-um-pq-async/db" -urls.query = "/te-benchmark-um-pq-async/queries?queries=" -urls.update = "/te-benchmark-um-pq-async/bupdates?queries=" -urls.fortune = "/te-benchmark-um-pq-async/fortunes" -approach = "Realistic" -classification = "Fullstack" -database = "postgres" -database_os = "Linux" -os = "Linux" -orm = "Raw" -platform = "None" -webserver = "ffead-cpp" +webserver = "libreactor" versus = "" -[java-rapidoid] +[crystal-http] urls.plaintext = "/plaintext" urls.json = "/te-benchmark-um/json" approach = "Realistic" @@ -52,25 +41,23 @@ database_os = "Linux" os = "Linux" orm = "None" platform = "None" -webserver = "rapidoid" +webserver = "crystal-http" versus = "" -[postgresql-raw-profiled] -urls.db = "/te-benchmark-um-pq/db" -urls.query = "/te-benchmark-um-pq/queries?queries=" -urls.update = "/te-benchmark-um-pq/updates?queries=" -urls.fortune = "/te-benchmark-um-pq/fortunes" +[d-hunt] +urls.plaintext = "/plaintext" +urls.json = "/te-benchmark-um/json" approach = "Realistic" classification = "Fullstack" -database = "postgres" +database = "None" database_os = "Linux" os = "Linux" -orm = "Raw" +orm = "None" platform = "None" -webserver = "ffead-cpp" +webserver = "hunt" versus = "" -[crystal-h2o] +[go-gnet] urls.plaintext = "/plaintext" urls.json = "/te-benchmark-um/json" approach = "Realistic" @@ -80,24 +67,20 @@ database_os = "Linux" os = "Linux" orm = "None" platform = "None" -webserver = "h2o.cr" +webserver = "gnet" versus = "" -[mysql] +[rust-actix] urls.plaintext = "/plaintext" urls.json = "/te-benchmark-um/json" -urls.db = "/te-benchmark-um/db" -urls.query = "/te-benchmark-um/queries?queries=" -urls.update = "/te-benchmark-um/updates?queries=" -urls.fortune = "/te-benchmark-um/fortunes" approach = "Realistic" classification = "Fullstack" -database = "mysql" +database = "None" database_os = "Linux" os = "Linux" -orm = "Full" +orm = "None" platform = "None" -webserver = "ffead-cpp" +webserver = "actix" versus = "" [v-picov] @@ -118,33 +101,43 @@ platform = "None" webserver = "picov" versus = "" -[rust-actix] +[v-picov-raw-profiled] urls.plaintext = "/plaintext" -urls.json = "/te-benchmark-um/json" +urls.json = "/te-benchmark-um-pq/json" +urls.db = "/te-benchmark-um-pq/db" +urls.query = "/te-benchmark-um-pq/queries?queries=" +urls.update = "/te-benchmark-um-pq/updates?queries=" +urls.fortune = "/te-benchmark-um-pq/fortunes" +urls.cached_query = 
"/te-benchmark-um-pq/cached-worlds?count=" approach = "Realistic" classification = "Fullstack" -database = "None" +database = "postgres" database_os = "Linux" os = "Linux" -orm = "None" +orm = "Raw" platform = "None" -webserver = "actix" +webserver = "picov" versus = "" -[cinatra] +[v-picov-raw-clibpqb-profiled] urls.plaintext = "/plaintext" -urls.json = "/te-benchmark-um/json" +urls.json = "/te-benchmark-um-pq/json" +urls.db = "/te-benchmark-um-pq/db" +urls.query = "/te-benchmark-um-pq/queries?queries=" +urls.update = "/te-benchmark-um-pq/updates?queries=" +urls.fortune = "/te-benchmark-um-pq/fortunes" +urls.cached_query = "/te-benchmark-um-pq/cached-worlds?count=" approach = "Realistic" classification = "Fullstack" -database = "None" +database = "postgres" database_os = "Linux" os = "Linux" -orm = "None" +orm = "Raw" platform = "None" -webserver = "cinatra" +webserver = "picov" versus = "" -[d-hunt] +[java-firenio] urls.plaintext = "/plaintext" urls.json = "/te-benchmark-um/json" approach = "Realistic" @@ -154,59 +147,67 @@ database_os = "Linux" os = "Linux" orm = "None" platform = "None" -webserver = "hunt" +webserver = "firenio" versus = "" -[rust-hyper] +[postgresql] urls.plaintext = "/plaintext" urls.json = "/te-benchmark-um/json" +urls.db = "/te-benchmark-um/db" +urls.query = "/te-benchmark-um/queries?queries=" +urls.update = "/te-benchmark-um/updates?queries=" +urls.fortune = "/te-benchmark-um/fortunes" approach = "Realistic" classification = "Fullstack" -database = "None" +database = "postgres" database_os = "Linux" os = "Linux" -orm = "None" +orm = "Full" platform = "None" -webserver = "hyper" +webserver = "ffead-cpp" versus = "" -[lithium] +[postgresql-raw] urls.plaintext = "/plaintext" -urls.json = "/te-benchmark-um/json" +urls.json = "/te-benchmark-um-pq/json" +urls.db = "/te-benchmark-um-pq/db" +urls.query = "/te-benchmark-um-pq/queries?queries=" +urls.update = "/te-benchmark-um-pq/updates?queries=" +urls.fortune = "/te-benchmark-um-pq/fortunes" +urls.cached_query = "/te-benchmark-um-pq/cached-worlds?count=" approach = "Realistic" classification = "Fullstack" -database = "None" +database = "postgres" database_os = "Linux" os = "Linux" -orm = "None" +orm = "Raw" platform = "None" -webserver = "lithium" +webserver = "ffead-cpp" versus = "" -[drogon] -urls.plaintext = "/plaintext" -urls.json = "/te-benchmark-um/json" +[postgresql-raw-profiled] +urls.db = "/te-benchmark-um-pq/db" +urls.query = "/te-benchmark-um-pq/queries?queries=" +urls.update = "/te-benchmark-um-pq/updates?queries=" +urls.fortune = "/te-benchmark-um-pq/fortunes" approach = "Realistic" classification = "Fullstack" -database = "None" +database = "postgres" database_os = "Linux" os = "Linux" -orm = "None" +orm = "Raw" platform = "None" -webserver = "drogon" +webserver = "ffead-cpp" versus = "" -[mongo-raw] -urls.plaintext = "/plaintext" -urls.json = "/te-benchmark-um-mgr/json" -urls.db = "/te-benchmark-um-mgr/db" -urls.query = "/te-benchmark-um-mgr/queries?queries=" -urls.update = "/te-benchmark-um-mgr/updates?queries=" -urls.fortune = "/te-benchmark-um-mgr/fortunes" -urls.cached_query = "/te-benchmark-um-mgr/cached-worlds?count=" +[postgresql-raw-clibpqb-profiled] +urls.db = "/te-benchmark-um-pq/db" +urls.query = "/te-benchmark-um-pq/queries?queries=" +urls.update = "/te-benchmark-um-pq/updates?queries=" +urls.fortune = "/te-benchmark-um-pq/fortunes" approach = "Realistic" classification = "Fullstack" -database = "mongodb" +database = "postgres" database_os = "Linux" os = "Linux" orm = "Raw" @@ -214,40 +215,44 @@ 
platform = "None" webserver = "ffead-cpp" versus = "" -[rust-thruster] +[postgresql-raw-async] urls.plaintext = "/plaintext" -urls.json = "/te-benchmark-um/json" +urls.json = "/te-benchmark-um-pq-async/json" +urls.db = "/te-benchmark-um-pq-async/db" +urls.query = "/te-benchmark-um-pq-async/queries?queries=" +urls.update = "/te-benchmark-um-pq-async/bupdates?queries=" +urls.fortune = "/te-benchmark-um-pq-async/fortunes" +urls.cached_query = "/te-benchmark-um-pq-async/cached-worlds?count=" approach = "Realistic" classification = "Fullstack" -database = "None" +database = "postgres" database_os = "Linux" os = "Linux" -orm = "None" +orm = "Raw" platform = "None" -webserver = "thruster" +webserver = "ffead-cpp" versus = "" -[go-gnet] -urls.plaintext = "/plaintext" -urls.json = "/te-benchmark-um/json" +[postgresql-raw-async-profiled] +urls.db = "/te-benchmark-um-pq-async/db" +urls.query = "/te-benchmark-um-pq-async/queries?queries=" +urls.update = "/te-benchmark-um-pq-async/bupdates?queries=" +urls.fortune = "/te-benchmark-um-pq-async/fortunes" approach = "Realistic" classification = "Fullstack" -database = "None" +database = "postgres" database_os = "Linux" os = "Linux" -orm = "None" +orm = "Raw" platform = "None" -webserver = "gnet" +webserver = "ffead-cpp" versus = "" -[postgresql-raw-async] -urls.plaintext = "/plaintext" -urls.json = "/te-benchmark-um-pq-async/json" +[postgresql-raw-async-clibpqb-profiled] urls.db = "/te-benchmark-um-pq-async/db" urls.query = "/te-benchmark-um-pq-async/queries?queries=" urls.update = "/te-benchmark-um-pq-async/bupdates?queries=" urls.fortune = "/te-benchmark-um-pq-async/fortunes" -urls.cached_query = "/te-benchmark-um-pq-async/cached-worlds?count=" approach = "Realistic" classification = "Fullstack" database = "postgres" @@ -258,20 +263,20 @@ platform = "None" webserver = "ffead-cpp" versus = "" -[rust-rocket] +[nginx] urls.plaintext = "/plaintext" urls.json = "/te-benchmark-um/json" approach = "Realistic" classification = "Fullstack" -database = "None" +database = "mongodb" database_os = "Linux" os = "Linux" -orm = "None" +orm = "Full" platform = "None" -webserver = "rocket" +webserver = "nginx" versus = "" -[nginx] +[seastar] urls.plaintext = "/plaintext" urls.json = "/te-benchmark-um/json" approach = "Realistic" @@ -281,7 +286,7 @@ database_os = "Linux" os = "Linux" orm = "Full" platform = "None" -webserver = "nginx" +webserver = "seastar-http" versus = "" [apache] @@ -297,14 +302,17 @@ platform = "None" webserver = "apache" versus = "" -[postgresql-raw-async-profiled] -urls.db = "/te-benchmark-um-pq-async/db" -urls.query = "/te-benchmark-um-pq-async/queries?queries=" -urls.update = "/te-benchmark-um-pq-async/bupdates?queries=" -urls.fortune = "/te-benchmark-um-pq-async/fortunes" +[mongo-raw] +urls.plaintext = "/plaintext" +urls.json = "/te-benchmark-um-mgr/json" +urls.db = "/te-benchmark-um-mgr/db" +urls.query = "/te-benchmark-um-mgr/queries?queries=" +urls.update = "/te-benchmark-um-mgr/updates?queries=" +urls.fortune = "/te-benchmark-um-mgr/fortunes" +urls.cached_query = "/te-benchmark-um-mgr/cached-worlds?count=" approach = "Realistic" classification = "Fullstack" -database = "postgres" +database = "mongodb" database_os = "Linux" os = "Linux" orm = "Raw" @@ -312,7 +320,7 @@ platform = "None" webserver = "ffead-cpp" versus = "" -[v-vweb] +[lithium] urls.plaintext = "/plaintext" urls.json = "/te-benchmark-um/json" approach = "Realistic" @@ -322,45 +330,36 @@ database_os = "Linux" os = "Linux" orm = "None" platform = "None" -webserver = "vweb" +webserver 
= "lithium" versus = "" -[postgresql] +[cinatra] urls.plaintext = "/plaintext" urls.json = "/te-benchmark-um/json" -urls.db = "/te-benchmark-um/db" -urls.query = "/te-benchmark-um/queries?queries=" -urls.update = "/te-benchmark-um/updates?queries=" -urls.fortune = "/te-benchmark-um/fortunes" approach = "Realistic" classification = "Fullstack" -database = "postgres" +database = "None" database_os = "Linux" os = "Linux" -orm = "Full" +orm = "None" platform = "None" -webserver = "ffead-cpp" +webserver = "cinatra" versus = "" -[postgresql-raw] +[drogon] urls.plaintext = "/plaintext" -urls.json = "/te-benchmark-um-pq/json" -urls.db = "/te-benchmark-um-pq/db" -urls.query = "/te-benchmark-um-pq/queries?queries=" -urls.update = "/te-benchmark-um-pq/updates?queries=" -urls.fortune = "/te-benchmark-um-pq/fortunes" -urls.cached_query = "/te-benchmark-um-pq/cached-worlds?count=" +urls.json = "/te-benchmark-um/json" approach = "Realistic" classification = "Fullstack" -database = "postgres" +database = "None" database_os = "Linux" os = "Linux" -orm = "Raw" +orm = "None" platform = "None" -webserver = "ffead-cpp" +webserver = "drogon" versus = "" -[swift-nio] +[h2o] urls.plaintext = "/plaintext" urls.json = "/te-benchmark-um/json" approach = "Realistic" @@ -370,10 +369,23 @@ database_os = "Linux" os = "Linux" orm = "None" platform = "None" -webserver = "swift-nio" +webserver = "h2o" versus = "" -[main] +[crystal-h2o] +urls.plaintext = "/plaintext" +urls.json = "/te-benchmark-um/json" +approach = "Realistic" +classification = "Fullstack" +database = "None" +database_os = "Linux" +os = "Linux" +orm = "None" +platform = "None" +webserver = "h2o.cr" +versus = "" + +[mysql] urls.plaintext = "/plaintext" urls.json = "/te-benchmark-um/json" urls.db = "/te-benchmark-um/db" @@ -382,7 +394,7 @@ urls.update = "/te-benchmark-um/updates?queries=" urls.fortune = "/te-benchmark-um/fortunes" approach = "Realistic" classification = "Fullstack" -database = "mongodb" +database = "mysql" database_os = "Linux" os = "Linux" orm = "Full" @@ -390,7 +402,7 @@ platform = "None" webserver = "ffead-cpp" versus = "" -[java-firenio] +[go-fasthttp] urls.plaintext = "/plaintext" urls.json = "/te-benchmark-um/json" approach = "Realistic" @@ -400,10 +412,10 @@ database_os = "Linux" os = "Linux" orm = "None" platform = "None" -webserver = "firenio" +webserver = "fasthttp" versus = "" -[libreactor] +[julia-http] urls.plaintext = "/plaintext" urls.json = "/te-benchmark-um/json" approach = "Realistic" @@ -413,25 +425,23 @@ database_os = "Linux" os = "Linux" orm = "None" platform = "None" -webserver = "libreactor" +webserver = "julia-http" versus = "" -[postgresql-raw-clibpqb-profiled] -urls.db = "/te-benchmark-um-pq/db" -urls.query = "/te-benchmark-um-pq/queries?queries=" -urls.update = "/te-benchmark-um-pq/updates?queries=" -urls.fortune = "/te-benchmark-um-pq/fortunes" +[swift-nio] +urls.plaintext = "/plaintext" +urls.json = "/te-benchmark-um/json" approach = "Realistic" classification = "Fullstack" -database = "postgres" +database = "None" database_os = "Linux" os = "Linux" -orm = "Raw" +orm = "None" platform = "None" -webserver = "ffead-cpp" +webserver = "swift-nio" versus = "" -[h2o] +[rust-hyper] urls.plaintext = "/plaintext" urls.json = "/te-benchmark-um/json" approach = "Realistic" @@ -441,10 +451,10 @@ database_os = "Linux" os = "Linux" orm = "None" platform = "None" -webserver = "h2o" +webserver = "hyper" versus = "" -[crystal-http] +[rust-thruster] urls.plaintext = "/plaintext" urls.json = "/te-benchmark-um/json" approach = 
"Realistic" @@ -454,10 +464,10 @@ database_os = "Linux" os = "Linux" orm = "None" platform = "None" -webserver = "crystal-http" +webserver = "thruster" versus = "" -[julia-http] +[rust-rocket] urls.plaintext = "/plaintext" urls.json = "/te-benchmark-um/json" approach = "Realistic" @@ -467,5 +477,44 @@ database_os = "Linux" os = "Linux" orm = "None" platform = "None" -webserver = "julia-http" +webserver = "rocket" +versus = "" + +[v-vweb] +urls.plaintext = "/plaintext" +urls.json = "/te-benchmark-um/json" +approach = "Realistic" +classification = "Fullstack" +database = "None" +database_os = "Linux" +os = "Linux" +orm = "None" +platform = "None" +webserver = "vweb" +versus = "" + +[java-rapidoid] +urls.plaintext = "/plaintext" +urls.json = "/te-benchmark-um/json" +approach = "Realistic" +classification = "Fullstack" +database = "None" +database_os = "Linux" +os = "Linux" +orm = "None" +platform = "None" +webserver = "rapidoid" versus = "" + +[java-wizzardo-http] +urls.plaintext = "/plaintext" +urls.json = "/te-benchmark-um/json" +approach = "Realistic" +classification = "Fullstack" +database = "None" +database_os = "Linux" +os = "Linux" +orm = "None" +platform = "None" +webserver = "wizzardo-http" +versus = "" \ No newline at end of file diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-apache.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-apache.dockerfile index 8930ddcc376..ab328ddba81 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-apache.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-apache.dockerfile @@ -1,9 +1,7 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 ENV IROOT=/installs WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0 apache mongo +CMD ./run_ffead.sh ffead-cpp-6.0 apache mongo diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-base-debug.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-base-debug.dockerfile index 2831c48fe71..45241104534 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-base-debug.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-base-debug.dockerfile @@ -1,6 +1,6 @@ FROM ubuntu:20.04 LABEL maintainer="Sumeet Chhetri" -LABEL version="5.2-debug" +LABEL version="6.0-debug" LABEL description="Base ffead-cpp docker image with commit id - master" ENV IROOT=/installs @@ -13,7 +13,7 @@ RUN mkdir /installs COPY te-benchmark-um/ /installs/te-benchmark-um/ COPY te-benchmark-um-pq/ /installs/te-benchmark-um-pq/ COPY te-benchmark-um-mgr/ /installs/te-benchmark-um-mgr/ -COPY te-benchmark-um-pq-async /installs/te-benchmark-um-pq-async/ +COPY te-benchmark-um-mgr/ /installs/te-benchmark-um-pq-async/ WORKDIR ${IROOT} diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-base.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-base.dockerfile index a3426ec1033..891dd506848 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-base.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-base.dockerfile @@ -1,6 +1,6 @@ -FROM ubuntu:20.04 +FROM sumeetchhetri/ffead-cpp-deps:6.0 LABEL maintainer="Sumeet Chhetri" -LABEL version="5.2" +LABEL version="6.0" LABEL description="Base ffead-cpp docker image with commit id - master" ENV IROOT=/installs @@ -9,22 +9,13 @@ ENV DEBUG=off ENV DEBIAN_FRONTEND noninteractive RUN echo 'debconf debconf/frontend select Noninteractive' | debconf-set-selections -RUN mkdir /installs COPY te-benchmark-um/ /installs/te-benchmark-um/ COPY te-benchmark-um-pq/ /installs/te-benchmark-um-pq/ COPY te-benchmark-um-mgr/ /installs/te-benchmark-um-mgr/ -COPY te-benchmark-um-pq-async /installs/te-benchmark-um-pq-async/ +COPY 
te-benchmark-um-pq-async/ /installs/te-benchmark-um-pq-async/ WORKDIR ${IROOT} -COPY install_ffead-cpp-dependencies.sh ${IROOT}/ -RUN chmod 755 ${IROOT}/install_ffead-cpp-dependencies.sh -RUN ./install_ffead-cpp-dependencies.sh - -COPY install_ffead-cpp-backends.sh ${IROOT}/ -RUN chmod 755 ${IROOT}/install_ffead-cpp-backends.sh -RUN ./install_ffead-cpp-backends.sh - COPY install_ffead-cpp-framework.sh install_ffead-cpp-httpd.sh install_ffead-cpp-nginx.sh server.sh ${IROOT}/ RUN chmod 755 ${IROOT}/*.sh RUN ./install_ffead-cpp-framework.sh && ./install_ffead-cpp-httpd.sh && ./install_ffead-cpp-nginx.sh && cd ${IROOT}/ffead-cpp-src && make clean && rm -rf CMakeFiles diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-cinatra.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-cinatra.dockerfile index 2174493c28c..ccd26faecf8 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-cinatra.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-cinatra.dockerfile @@ -1,9 +1,7 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 ENV IROOT=/installs WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0 cinatra +CMD ./run_ffead.sh ffead-cpp-6.0 cinatra diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-crystal-h2o.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-crystal-h2o.dockerfile index 1acff4df3cd..026abac8c85 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-crystal-h2o.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-crystal-h2o.dockerfile @@ -1,13 +1,13 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 ENV IROOT=/installs RUN rm -f /usr/local/lib/libffead-* /usr/local/lib/libte_benc* /usr/local/lib/libinter.so /usr/local/lib/libdinter.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libte_benchmark_um.so /usr/local/lib/libte_benchmark_um.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libinter.so /usr/local/lib/libinter.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libdinter.so /usr/local/lib/libdinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libte-benchmark-um.so /usr/local/lib/libte-benchmark-um.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libinter.so /usr/local/lib/libinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libdinter.so /usr/local/lib/libdinter.so && \ ldconfig RUN apt-get update -y && apt-get install -yqq libh2o-evloop-dev libwslay-dev libyaml-0-2 libevent-dev libpcre3-dev \ @@ -21,6 +21,4 @@ RUN shards install && gcc -shared -O3 lib/h2o/src/ext/h2o.c -I/usr/include -fPIC WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0 crystal-h2o +CMD ./run_ffead.sh ffead-cpp-6.0 crystal-h2o diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-crystal-http.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-crystal-http.dockerfile index 02185d200e4..252c43a8955 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-crystal-http.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-crystal-http.dockerfile @@ -1,13 +1,13 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 ENV IROOT=/installs RUN rm -f /usr/local/lib/libffead-* /usr/local/lib/libte_benc* /usr/local/lib/libinter.so /usr/local/lib/libdinter.so && \ - ln -s 
${IROOT}/ffead-cpp-5.0/lib/libte_benchmark_um.so /usr/local/lib/libte_benchmark_um.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libinter.so /usr/local/lib/libinter.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libdinter.so /usr/local/lib/libdinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libte-benchmark-um.so /usr/local/lib/libte-benchmark-um.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libinter.so /usr/local/lib/libinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libdinter.so /usr/local/lib/libdinter.so && \ ldconfig RUN apt-get update -y && apt install -y --no-install-recommends gnupg && curl -sL "https://keybase.io/crystal/pgp_keys.asc" | apt-key add - \ @@ -18,6 +18,4 @@ RUN crystal build --release --no-debug crystal-ffead-cpp.cr -o crystal-ffead-cpp WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0 crystal-http +CMD ./run_ffead.sh ffead-cpp-6.0 crystal-http diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-d-hunt.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-d-hunt.dockerfile index 5426e2ca92c..d3e636adb70 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-d-hunt.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-d-hunt.dockerfile @@ -1,14 +1,14 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 ENV IROOT=/installs ENV DEBIAN_FRONTEND noninteractive RUN rm -f /usr/local/lib/libffead-* /usr/local/lib/libte_benc* /usr/local/lib/libinter.so /usr/local/lib/libdinter.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libte_benchmark_um.so /usr/local/lib/libte_benchmark_um.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libinter.so /usr/local/lib/libinter.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libdinter.so /usr/local/lib/libdinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libte-benchmark-um.so /usr/local/lib/libte-benchmark-um.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libinter.so /usr/local/lib/libinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libdinter.so /usr/local/lib/libdinter.so && \ ldconfig ENV LDC_VERSION 1.24.0 @@ -36,6 +36,4 @@ RUN dub upgrade --verbose && dub build --build=release --arch=x86_64 --compiler= WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0 d-hunt +CMD ./run_ffead.sh ffead-cpp-6.0 d-hunt diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-deps.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-deps.dockerfile new file mode 100644 index 00000000000..9154224ae5d --- /dev/null +++ b/frameworks/C++/ffead-cpp/ffead-cpp-deps.dockerfile @@ -0,0 +1,22 @@ +FROM ubuntu:20.04 +LABEL maintainer="Sumeet Chhetri" +LABEL version="6.0" +LABEL description="ffead-cpp docker image with backend dependencies" + +ENV IROOT=/installs +ENV DEBUG=off + +ENV DEBIAN_FRONTEND noninteractive +RUN echo 'debconf debconf/frontend select Noninteractive' | debconf-set-selections + +RUN mkdir /installs + 
+WORKDIR ${IROOT} + +COPY install_ffead-cpp-dependencies.sh ${IROOT}/ +RUN chmod 755 ${IROOT}/install_ffead-cpp-dependencies.sh +RUN ./install_ffead-cpp-dependencies.sh + +COPY install_ffead-cpp-backends.sh ${IROOT}/ +RUN chmod 755 ${IROOT}/install_ffead-cpp-backends.sh +RUN ./install_ffead-cpp-backends.sh diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-drogon.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-drogon.dockerfile index 4e41526a1f6..06c6b9077b9 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-drogon.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-drogon.dockerfile @@ -1,9 +1,7 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 ENV IROOT=/installs WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0 drogon +CMD ./run_ffead.sh ffead-cpp-6.0 drogon diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-go-fasthttp.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-go-fasthttp.dockerfile index 569e51a2e65..fd562fb9dd7 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-go-fasthttp.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-go-fasthttp.dockerfile @@ -1,13 +1,13 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 ENV IROOT=/installs RUN rm -f /usr/local/lib/libffead-* /usr/local/lib/libte_benc* /usr/local/lib/libinter.so /usr/local/lib/libdinter.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libte_benchmark_um.so /usr/local/lib/libte_benchmark_um.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libinter.so /usr/local/lib/libinter.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libdinter.so /usr/local/lib/libdinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libte-benchmark-um.so /usr/local/lib/libte-benchmark-um.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libinter.so /usr/local/lib/libinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libdinter.so /usr/local/lib/libdinter.so && \ ldconfig RUN wget -q https://dl.google.com/go/go1.14.4.linux-amd64.tar.gz && tar -C /usr/local -xzf go1.14.4.linux-amd64.tar.gz @@ -17,6 +17,4 @@ RUN make && cp fasthttp-ffead-cpp $IROOT/ && rm -rf ${IROOT}/lang-server-backend WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0 go-fasthttp +CMD ./run_ffead.sh ffead-cpp-6.0 go-fasthttp diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-go-gnet.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-go-gnet.dockerfile index da3155912cb..fec2e69eeff 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-go-gnet.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-go-gnet.dockerfile @@ -1,13 +1,13 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 ENV IROOT=/installs RUN rm -f /usr/local/lib/libffead-* /usr/local/lib/libte_benc* /usr/local/lib/libinter.so /usr/local/lib/libdinter.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libte_benchmark_um.so /usr/local/lib/libte_benchmark_um.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libinter.so /usr/local/lib/libinter.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libdinter.so 
/usr/local/lib/libdinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libte-benchmark-um.so /usr/local/lib/libte-benchmark-um.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libinter.so /usr/local/lib/libinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libdinter.so /usr/local/lib/libdinter.so && \ ldconfig RUN wget -q https://dl.google.com/go/go1.14.4.linux-amd64.tar.gz && tar -C /usr/local -xzf go1.14.4.linux-amd64.tar.gz @@ -17,6 +17,4 @@ RUN make && cp gnet-ffead-cpp $IROOT/ && rm -rf ${IROOT}/lang-server-backends WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0 go-gnet +CMD ./run_ffead.sh ffead-cpp-6.0 go-gnet diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-h2o.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-h2o.dockerfile index a82f1c6e4a4..c0691624b0b 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-h2o.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-h2o.dockerfile @@ -1,14 +1,14 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 ENV IROOT=/installs ENV DEBIAN_FRONTEND noninteractive RUN rm -f /usr/local/lib/libffead-* /usr/local/lib/libte_benc* /usr/local/lib/libinter.so /usr/local/lib/libdinter.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libte_benchmark_um.so /usr/local/lib/libte_benchmark_um.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libinter.so /usr/local/lib/libinter.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libdinter.so /usr/local/lib/libdinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libte-benchmark-um.so /usr/local/lib/libte-benchmark-um.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libinter.so /usr/local/lib/libinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libdinter.so /usr/local/lib/libdinter.so && \ ldconfig WORKDIR ${IROOT} @@ -37,6 +37,4 @@ RUN chmod +x h2o.sh WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0 h2o +CMD ./run_ffead.sh ffead-cpp-6.0 h2o diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-java-base.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-java-base.dockerfile index d1b51688a05..7e2c60a285b 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-java-base.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-java-base.dockerfile @@ -1,16 +1,16 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 LABEL maintainer="Sumeet Chhetri" -LABEL version="5.2" +LABEL version="6.0" LABEL description="Base java docker image with master code" ENV IROOT=/installs RUN rm -f /usr/local/lib/libffead-* /usr/local/lib/libte_benc* /usr/local/lib/libinter.so /usr/local/lib/libdinter.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libte_benchmark_um.so /usr/local/lib/libte_benchmark_um.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libinter.so /usr/local/lib/libinter.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libdinter.so /usr/local/lib/libdinter.so && \ 
+ ln -s ${IROOT}/ffead-cpp-6.0/lib/libte-benchmark-um.so /usr/local/lib/libte-benchmark-um.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libinter.so /usr/local/lib/libinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libdinter.so /usr/local/lib/libdinter.so && \ ldconfig RUN apt update -yqq && apt install -y --no-install-recommends default-jre maven gradle && rm -rf /var/lib/apt/lists/* diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-java-firenio.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-java-firenio.dockerfile index 38c6b29fc69..f48f9cf06e1 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-java-firenio.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-java-firenio.dockerfile @@ -1,9 +1,7 @@ -FROM sumeetchhetri/ffead-cpp-5.0-java-base:5.2 +FROM sumeetchhetri/ffead-cpp-java-base:6.0 ENV IROOT=/installs WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0 java-firenio +CMD ./run_ffead.sh ffead-cpp-6.0 java-firenio diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-java-rapidoid.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-java-rapidoid.dockerfile index 8a7fc0cc795..5d139a094da 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-java-rapidoid.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-java-rapidoid.dockerfile @@ -1,9 +1,7 @@ -FROM sumeetchhetri/ffead-cpp-5.0-java-base:5.2 +FROM sumeetchhetri/ffead-cpp-java-base:6.0 ENV IROOT=/installs WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0 java-rapidoid +CMD ./run_ffead.sh ffead-cpp-6.0 java-rapidoid diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-java-wizzardo-http.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-java-wizzardo-http.dockerfile index 9526e30a052..b163375f97b 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-java-wizzardo-http.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-java-wizzardo-http.dockerfile @@ -1,9 +1,7 @@ -FROM sumeetchhetri/ffead-cpp-5.0-java-base:5.2 +FROM sumeetchhetri/ffead-cpp-java-base:6.0 ENV IROOT=/installs WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0 java-wizzardo-http +CMD ./run_ffead.sh ffead-cpp-6.0 java-wizzardo-http diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-julia-http.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-julia-http.dockerfile index 76e533af033..a322fe557ea 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-julia-http.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-julia-http.dockerfile @@ -1,14 +1,14 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 ENV IROOT=/installs ENV DEBIAN_FRONTEND noninteractive RUN rm -f /usr/local/lib/libffead-* /usr/local/lib/libte_benc* /usr/local/lib/libinter.so /usr/local/lib/libdinter.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libte_benchmark_um.so /usr/local/lib/libte_benchmark_um.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libinter.so /usr/local/lib/libinter.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libdinter.so /usr/local/lib/libdinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libte-benchmark-um.so /usr/local/lib/libte-benchmark-um.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-framework.so 
/usr/local/lib/libffead-framework.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libinter.so /usr/local/lib/libinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libdinter.so /usr/local/lib/libdinter.so && \ ldconfig WORKDIR ${IROOT} @@ -25,7 +25,4 @@ RUN julia -e 'import Pkg; Pkg.update()' && \ WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0 julia-http - +CMD ./run_ffead.sh ffead-cpp-6.0 julia-http diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-libreactor.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-libreactor.dockerfile index 8202ce710b9..0d019709db8 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-libreactor.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-libreactor.dockerfile @@ -1,13 +1,13 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 ENV IROOT=/installs RUN rm -f /usr/local/lib/libffead-* /usr/local/lib/libte_benc* /usr/local/lib/libinter.so /usr/local/lib/libdinter.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libte_benchmark_um.so /usr/local/lib/libte_benchmark_um.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libinter.so /usr/local/lib/libinter.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libdinter.so /usr/local/lib/libdinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libte-benchmark-um.so /usr/local/lib/libte-benchmark-um.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libinter.so /usr/local/lib/libinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libdinter.so /usr/local/lib/libdinter.so && \ ldconfig WORKDIR ${IROOT} @@ -24,6 +24,4 @@ RUN make && cp libreactor-ffead-cpp $IROOT/ && rm -rf ${IROOT}/lang-server-backe WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0 libreactor +CMD ./run_ffead.sh ffead-cpp-6.0 libreactor diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-lithium.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-lithium.dockerfile index e77b96bf319..851568e722a 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-lithium.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-lithium.dockerfile @@ -1,9 +1,7 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 ENV IROOT=/installs WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0 lithium +CMD ./run_ffead.sh ffead-cpp-6.0 lithium diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-mongo-raw.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-mongo-raw.dockerfile index a6234af4828..40f54d36d0c 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-mongo-raw.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-mongo-raw.dockerfile @@ -1,9 +1,7 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 ENV IROOT=/installs WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0 emb mongo-raw memory +CMD ./run_ffead.sh ffead-cpp-6.0 emb mongo-raw memory diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-mysql.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-mysql.dockerfile index 72c9b7d40fa..06894663fdb 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-mysql.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-mysql.dockerfile @@ -1,9 +1,7 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 ENV IROOT=/installs WORKDIR / -EXPOSE 8080 
- -CMD ./run_ffead.sh ffead-cpp-5.0-sql emb mysql +CMD ./run_ffead.sh ffead-cpp-6.0-sql emb mysql diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-nginx.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-nginx.dockerfile index 66293e7f205..538cdc3a7d7 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-nginx.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-nginx.dockerfile @@ -1,9 +1,7 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 ENV IROOT=/installs WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0 nginx mongo +CMD ./run_ffead.sh ffead-cpp-6.0 nginx mongo diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-postgresql-raw-async-clibpqb-profiled.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-postgresql-raw-async-clibpqb-profiled.dockerfile index 79f8d17bca0..854ea8df9b7 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-postgresql-raw-async-clibpqb-profiled.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-postgresql-raw-async-clibpqb-profiled.dockerfile @@ -1,9 +1,7 @@ -FROM sumeetchhetri/ffead-cpp-5.0-sql-raw-async-clibpqb-profiled-base:5.2 +FROM sumeetchhetri/ffead-cpp-sql-raw-async-clibpqb-profiled-base:6.0 ENV IROOT=/installs WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0-sql emb postgresql-raw-async memory +CMD ./run_ffead.sh ffead-cpp-6.0-sql emb postgresql-raw-async memory diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-postgresql-raw-async-profiled.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-postgresql-raw-async-profiled.dockerfile index 4e5c2777e8d..121a6bd8c3e 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-postgresql-raw-async-profiled.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-postgresql-raw-async-profiled.dockerfile @@ -1,9 +1,7 @@ -FROM sumeetchhetri/ffead-cpp-5.0-sql-raw-async-profiled-base:5.2 +FROM sumeetchhetri/ffead-cpp-sql-raw-async-profiled-base:6.0 ENV IROOT=/installs WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0-sql emb postgresql-raw-async memory +CMD ./run_ffead.sh ffead-cpp-6.0-sql emb postgresql-raw-async memory diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-postgresql-raw-async.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-postgresql-raw-async.dockerfile index 0d3462c7fa1..6c7698853ed 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-postgresql-raw-async.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-postgresql-raw-async.dockerfile @@ -1,9 +1,7 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 ENV IROOT=/installs WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0-sql emb postgresql-raw-async memory +CMD ./run_ffead.sh ffead-cpp-6.0-sql emb postgresql-raw-async memory diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-postgresql-raw-clibpqb-profiled.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-postgresql-raw-clibpqb-profiled.dockerfile index 54259d38dea..8dd6c8db697 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-postgresql-raw-clibpqb-profiled.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-postgresql-raw-clibpqb-profiled.dockerfile @@ -1,9 +1,7 @@ -FROM sumeetchhetri/ffead-cpp-5.0-sql-raw-clibpqb-profiled-base:5.2 +FROM sumeetchhetri/ffead-cpp-sql-raw-clibpqb-profiled-base:6.0 ENV IROOT=/installs WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0-sql emb postgresql-raw memory +CMD ./run_ffead.sh ffead-cpp-6.0-sql emb postgresql-raw memory diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-postgresql-raw-profiled.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-postgresql-raw-profiled.dockerfile index 7d8e480eb90..477dce0c9e7 
100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-postgresql-raw-profiled.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-postgresql-raw-profiled.dockerfile @@ -1,9 +1,7 @@ -FROM sumeetchhetri/ffead-cpp-5.0-sql-raw-profiled-base:5.2 +FROM sumeetchhetri/ffead-cpp-sql-raw-profiled-base:6.0 ENV IROOT=/installs WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0-sql emb postgresql-raw memory +CMD ./run_ffead.sh ffead-cpp-6.0-sql emb postgresql-raw memory diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-postgresql-raw.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-postgresql-raw.dockerfile index f6291639a1f..80116b803cd 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-postgresql-raw.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-postgresql-raw.dockerfile @@ -1,9 +1,7 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 ENV IROOT=/installs WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0-sql emb postgresql-raw memory +CMD ./run_ffead.sh ffead-cpp-6.0-sql emb postgresql-raw memory diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-postgresql.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-postgresql.dockerfile index ea0a303d38f..408d924d160 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-postgresql.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-postgresql.dockerfile @@ -1,9 +1,7 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 ENV IROOT=/installs WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0-sql emb postgresql redis +CMD ./run_ffead.sh ffead-cpp-6.0-sql emb postgresql redis diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-rust-actix.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-rust-actix.dockerfile index a4ca8faf77c..dd166fa5476 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-rust-actix.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-rust-actix.dockerfile @@ -1,9 +1,7 @@ -FROM sumeetchhetri/ffead-cpp-5.0-rust-base:5.2 +FROM sumeetchhetri/ffead-cpp-rust-base:6.0 ENV IROOT=/installs WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0 rust-actix +CMD ./run_ffead.sh ffead-cpp-6.0 rust-actix diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-rust-base.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-rust-base.dockerfile index 60aa5694559..636a82bf224 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-rust-base.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-rust-base.dockerfile @@ -1,16 +1,16 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 LABEL maintainer="Sumeet Chhetri" -LABEL version="5.2" +LABEL version="6.0" LABEL description="Base rust docker image with ffead-cpp v4.0 - commit id - master" ENV IROOT=/installs RUN rm -f /usr/local/lib/libffead-* /usr/local/lib/libte_benc* /usr/local/lib/libinter.so /usr/local/lib/libdinter.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libte_benchmark_um.so /usr/local/lib/libte_benchmark_um.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libinter.so /usr/local/lib/libinter.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libdinter.so /usr/local/lib/libdinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libte-benchmark-um.so /usr/local/lib/libte-benchmark-um.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-framework.so 
/usr/local/lib/libffead-framework.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libinter.so /usr/local/lib/libinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libdinter.so /usr/local/lib/libdinter.so && \ ldconfig RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-rust-hyper.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-rust-hyper.dockerfile index af4821850ec..b7d777f909a 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-rust-hyper.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-rust-hyper.dockerfile @@ -1,9 +1,7 @@ -FROM sumeetchhetri/ffead-cpp-5.0-rust-base:5.2 +FROM sumeetchhetri/ffead-cpp-rust-base:6.0 ENV IROOT=/installs WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0 rust-hyper +CMD ./run_ffead.sh ffead-cpp-6.0 rust-hyper diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-rust-rocket-base.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-rust-rocket-base.dockerfile index f677b059c3a..cf4b0edba8d 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-rust-rocket-base.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-rust-rocket-base.dockerfile @@ -1,16 +1,16 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 LABEL maintainer="Sumeet Chhetri" -LABEL version="5.2" -LABEL description="Base rust rocket docker image with ffead-cpp v5.0 - commit id - master" +LABEL version="6.0" +LABEL description="Base rust rocket docker image with ffead-cpp v6.0 - commit id - master" ENV IROOT=/installs RUN rm -f /usr/local/lib/libffead-* /usr/local/lib/libte_benc* /usr/local/lib/libinter.so /usr/local/lib/libdinter.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libte_benchmark_um.so /usr/local/lib/libte_benchmark_um.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libinter.so /usr/local/lib/libinter.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libdinter.so /usr/local/lib/libdinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libte-benchmark-um.so /usr/local/lib/libte-benchmark-um.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libinter.so /usr/local/lib/libinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libdinter.so /usr/local/lib/libdinter.so && \ ldconfig RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-rust-rocket.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-rust-rocket.dockerfile index d5973abef77..a8afa36bd3c 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-rust-rocket.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-rust-rocket.dockerfile @@ -1,9 +1,7 @@ -FROM sumeetchhetri/ffead-cpp-5.0-rust-rocket-base:5.2 +FROM sumeetchhetri/ffead-cpp-rust-rocket-base:6.0 ENV IROOT=/installs WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0 rust-rocket +CMD ./run_ffead.sh ffead-cpp-6.0 rust-rocket diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-rust-thruster.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-rust-thruster.dockerfile index 9fba366a759..5c12723cb60 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-rust-thruster.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-rust-thruster.dockerfile @@ -1,9 +1,7 @@ -FROM 
sumeetchhetri/ffead-cpp-5.0-rust-base:latest +FROM sumeetchhetri/ffead-cpp-rust-base:latest ENV IROOT=/installs WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0 rust-thruster +CMD ./run_ffead.sh ffead-cpp-6.0 rust-thruster diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-seastar-base.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-seastar-base.dockerfile new file mode 100644 index 00000000000..96d27508ff0 --- /dev/null +++ b/frameworks/C++/ffead-cpp/ffead-cpp-seastar-base.dockerfile @@ -0,0 +1,59 @@ +FROM sumeetchhetri/ffead-cpp-base:6.0 + +#seastar needs ubuntu 20 and boost >= 1.66 +WORKDIR ${IROOT} + +ENV DEBIAN_FRONTEND noninteractive + +RUN rm -f /usr/local/lib/libffead-* /usr/local/lib/libte_benc* /usr/local/lib/libinter.so /usr/local/lib/libdinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libte-benchmark-um.so /usr/local/lib/libte-benchmark-um.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libte-benchmark-um-pq.so /usr/local/lib/libte-benchmark-um-pq.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libte-benchmark-um-mgr.so /usr/local/lib/libte-benchmark-um-mgr.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libte-benchmark-um-pq-async.so /usr/local/lib/libte-benchmark-um-pq-async.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libinter.so /usr/local/lib/libinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libdinter.so /usr/local/lib/libdinter.so && \ + ldconfig + +ENV FFEAD_CPP_PATH=${IROOT}/ffead-cpp-6.0 +ENV LD_LIBRARY_PATH=${IROOT}/:${IROOT}/lib:${FFEAD_CPP_PATH}/lib:/usr/local/lib:$LD_LIBRARY_PATH + +#seastar need hwloc 2 +RUN cd /tmp && wget -q https://github.com/open-mpi/hwloc/releases/download/hwloc-2.1.0/hwloc-2.1.0.tar.gz && \ + tar xvf hwloc-2.1.0.tar.gz && cd hwloc-2.1.0 && ./configure --prefix=/usr/local/ && make install +RUN rm -rf /tmp/hwloc-2.1.0 + +RUN apt update -y && apt install -y pkg-config liblzma-dev libunistring-dev libudev-dev bridge-utils \ + net-tools iproute2 kmod sudo qemu-kvm libvirt-clients libvirt-daemon-system + +RUN sudo adduser $(whoami) libvirt +RUN sudo adduser $(whoami) kvm +RUN sudo adduser $(whoami) libvirt-qemu +RUN sudo adduser $(whoami) libvirt-dnsmasq +#RUN sudo chown $(whoami) /dev/kvm +#RUN sudo chmod 777 /dev/kvm + +#seastar needs gcc-10 +RUN git clone https://github.com/sumeetchhetri/seastar && cd seastar && git checkout for_ffead +RUN cd seastar && chmod +x *.sh && apt update -y && ./install-dependencies.sh && apt remove -y libfmt-dev && \ + ./configure.py --mode=release --cook fmt && ./configure.py --mode=release --prefix=/usr/local +RUN cd seastar && ninja -C build/release install && cp build/release/_cooking/installed/lib/libfmt.a /usr/local/lib/ && \ + cp -rf build/release/_cooking/installed/include/fmt /usr/local/include/ && cp apps/lib/stop_signal.hh /${IROOT}/lang-server-backends/c++/seastar && \ + cd ${IROOT} && rm -rf ${IROOT}/seastar && mkdir -p ${IROOT}/seastar/build/release/_cooking/installed/lib/ && \ + cp /usr/local/lib/libfmt.a ${IROOT}/seastar/build/release/_cooking/installed/lib/ + +WORKDIR ${IROOT}/lang-server-backends/c++/seastar + +#RUN g++ -g SeastarFfeadCpp.cpp -I/home/mavuser/ffead-cpp-6.0/include/ -I/usr/include/libmongoc-1.0 \ +# -I/usr/include/libbson-1.0 -I. -I/usr/local/include $(pkg-config --libs --cflags --static seastar) -lffead-framework \ +# -lffead-modules -o ffead-cpp-seastar + +RUN g++ SeastarFfeadCpp.cpp -O3 -I. 
$(pkg-config --libs --cflags --static seastar) -lffead-framework -o ffead-cpp-seastar + +RUN chmod +x run.sh + +WORKDIR / + +CMD ./run_ffead.sh ffead-cpp-6.0 seastar + diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-seastar.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-seastar.dockerfile new file mode 100644 index 00000000000..7b60874ce4f --- /dev/null +++ b/frameworks/C++/ffead-cpp/ffead-cpp-seastar.dockerfile @@ -0,0 +1,5 @@ +FROM sumeetchhetri/ffead-cpp-seastar:6.0 + +WORKDIR / + +CMD ./run_ffead.sh ffead-cpp-6.0 seastar diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-sql-raw-async-clibpqb-profiled-base.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-sql-raw-async-clibpqb-profiled-base.dockerfile index 5ee60e1863e..589f89e57da 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-sql-raw-async-clibpqb-profiled-base.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-sql-raw-async-clibpqb-profiled-base.dockerfile @@ -1,6 +1,6 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 LABEL maintainer="Sumeet Chhetri" -LABEL version="5.2" +LABEL version="6.0" LABEL description="SQL Raw Custom libpq batch patched Base ffead-cpp docker image with commit id - master" WORKDIR /tmp @@ -79,7 +79,15 @@ ENV accept accept WORKDIR ${IROOT} +COPY sql-profiled-util.sh ${IROOT}/ +RUN chmod 755 ${IROOT}/sql-profiled-util.sh +RUN ./sql-profiled-util.sh batch clang async + +#COPY TeBkUmLpqAsync.cpp ${IROOT}/ffead-cpp-src/web/te-benchmark-um-pq-async/src/ +#COPY TeBkUmLpqAsync.h ${IROOT}/ffead-cpp-src/web/te-benchmark-um-pq-async/include/ +#COPY LibpqDataSourceImpl.cpp ${IROOT}/ffead-cpp-src/src/modules/sdorm/sql/libpq/ +#COPY LibpqDataSourceImpl.h ${IROOT}/ffead-cpp-src/src/modules/sdorm/sql/libpq/ + COPY sql-async-profiled-install-clang.sh install_ffead-cpp-sql-raw-profiled.sh ${IROOT}/ RUN chmod 755 ${IROOT}/sql-async-profiled-install-clang.sh ${IROOT}/install_ffead-cpp-sql-raw-profiled.sh - RUN ./sql-async-profiled-install-clang.sh batch diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-sql-raw-async-profiled-base.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-sql-raw-async-profiled-base.dockerfile index f6d19fe7429..ac0b19e9cf7 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-sql-raw-async-profiled-base.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-sql-raw-async-profiled-base.dockerfile @@ -1,6 +1,6 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 LABEL maintainer="Sumeet Chhetri" -LABEL version="5.2" +LABEL version="6.0" LABEL description="SQL Raw Base ffead-cpp docker image with commit id - master" WORKDIR /tmp @@ -79,7 +79,10 @@ ENV accept accept WORKDIR ${IROOT} +COPY sql-profiled-util.sh ${IROOT}/ +RUN chmod 755 ${IROOT}/sql-profiled-util.sh +RUN ./sql-profiled-util.sh nobatch noclang async + COPY sql-async-profiled-install.sh install_ffead-cpp-sql-raw-profiled.sh ${IROOT}/ RUN chmod 755 ${IROOT}/sql-async-profiled-install.sh ${IROOT}/install_ffead-cpp-sql-raw-profiled.sh - RUN ./sql-async-profiled-install.sh diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-sql-raw-clibpqb-profiled-base.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-sql-raw-clibpqb-profiled-base.dockerfile index 692eb1cb91e..8e016159afb 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-sql-raw-clibpqb-profiled-base.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-sql-raw-clibpqb-profiled-base.dockerfile @@ -1,6 +1,6 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 LABEL maintainer="Sumeet Chhetri" -LABEL version="5.2" +LABEL 
version="6.0" LABEL description="SQL Raw Custom libpq batch patched Base ffead-cpp docker image with commit id - master" WORKDIR /tmp @@ -79,7 +79,10 @@ ENV accept accept WORKDIR ${IROOT} +COPY sql-profiled-util.sh ${IROOT}/ +RUN chmod 755 ${IROOT}/sql-profiled-util.sh +RUN ./sql-profiled-util.sh batch clang noasync + COPY sql-profiled-install-clang.sh install_ffead-cpp-sql-raw-profiled.sh ${IROOT}/ RUN chmod 755 ${IROOT}/sql-profiled-install-clang.sh ${IROOT}/install_ffead-cpp-sql-raw-profiled.sh - RUN ./sql-profiled-install-clang.sh batch diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-sql-raw-profiled-base.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-sql-raw-profiled-base.dockerfile index c450cdd74ff..384f1defd39 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-sql-raw-profiled-base.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-sql-raw-profiled-base.dockerfile @@ -1,6 +1,6 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 LABEL maintainer="Sumeet Chhetri" -LABEL version="5.2" +LABEL version="6.0" LABEL description="SQL Raw Base ffead-cpp docker image with commit id - master" WORKDIR /tmp @@ -79,7 +79,10 @@ ENV accept accept WORKDIR ${IROOT} +COPY sql-profiled-util.sh ${IROOT}/ +RUN chmod 755 ${IROOT}/sql-profiled-util.sh +RUN ./sql-profiled-util.sh nobatch noclang noasync + COPY sql-profiled-install.sh install_ffead-cpp-sql-raw-profiled.sh ${IROOT}/ RUN chmod 755 ${IROOT}/sql-profiled-install.sh ${IROOT}/install_ffead-cpp-sql-raw-profiled.sh - RUN ./sql-profiled-install.sh diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-swift-nio.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-swift-nio.dockerfile index 3a1da3ab644..a7fd097a8fd 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-swift-nio.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-swift-nio.dockerfile @@ -1,24 +1,24 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 ENV IROOT=/installs ENV DEBIAN_FRONTEND noninteractive RUN rm -f /usr/local/lib/libffead-* /usr/local/lib/libte_benc* /usr/local/lib/libinter.so /usr/local/lib/libdinter.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libte_benchmark_um.so /usr/local/lib/libte_benchmark_um.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libinter.so /usr/local/lib/libinter.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libdinter.so /usr/local/lib/libdinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libte-benchmark-um.so /usr/local/lib/libte-benchmark-um.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libinter.so /usr/local/lib/libinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libdinter.so /usr/local/lib/libdinter.so && \ ldconfig WORKDIR ${IROOT} RUN apt-get update -y && apt-get install -y --no-install-recommends clang libicu-dev libpython2.7-dev libtinfo5 libncurses5 libz3-dev \ && rm -rf /var/lib/apt/lists/* - -RUN wget -q https://swift.org/builds/swift-5.3-release/ubuntu2004/swift-5.3-RELEASE/swift-5.3-RELEASE-ubuntu20.04.tar.gz -RUN tar -xzf swift-5.3-RELEASE-ubuntu20.04.tar.gz -RUN mv swift-5.3-RELEASE-ubuntu20.04 /opt/ && rm -f swift-5.3-RELEASE-ubuntu20.04.tar.gz -RUN ln -s /opt/swift-5.3-RELEASE-ubuntu20.04 /opt/swift + +RUN wget -q 
https://swift.org/builds/swift-6.0-release/ubuntu2004/swift-6.0-RELEASE/swift-6.0-RELEASE-ubuntu20.04.tar.gz +RUN tar -xzf swift-6.0-RELEASE-ubuntu20.04.tar.gz +RUN mv swift-6.0-RELEASE-ubuntu20.04 /opt/ && rm -f swift-6.0-RELEASE-ubuntu20.04.tar.gz +RUN ln -s /opt/swift-6.0-RELEASE-ubuntu20.04 /opt/swift ENV PATH=/opt/swift/usr/bin:${PATH} @@ -28,6 +28,4 @@ RUN swift build --enable-test-discovery -c release -Xlinker "-L/usr/local/lib" - WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0 swift-nio +CMD ./run_ffead.sh ffead-cpp-6.0 swift-nio diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-v-base.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-v-base.dockerfile index 2b75705aee6..038e307a52f 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-v-base.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-v-base.dockerfile @@ -1,20 +1,24 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 LABEL maintainer="Sumeet Chhetri" -LABEL version="5.2" +LABEL version="6.0" LABEL description="Base v docker image with ffead-cpp v4.0 commit id - master" ENV IROOT=/installs RUN rm -f /usr/local/lib/libffead-* /usr/local/lib/libte_benc* /usr/local/lib/libinter.so /usr/local/lib/libdinter.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libte_benchmark_um.so /usr/local/lib/libte_benchmark_um.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libinter.so /usr/local/lib/libinter.so && \ - ln -s ${IROOT}/ffead-cpp-5.0/lib/libdinter.so /usr/local/lib/libdinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libte-benchmark-um.so /usr/local/lib/libte-benchmark-um.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libinter.so /usr/local/lib/libinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0/lib/libdinter.so /usr/local/lib/libdinter.so && \ ldconfig RUN apt update -yqq && apt install -y git make && rm -rf /var/lib/apt/lists/* -RUN git clone https://github.com/vlang/v && cd v && make && ./v symlink +#For Latest vlang, uncomment the below mentioned line, due to lot of new enhancements and unsafe block handling, vlnag has slowed down tremendously +#RUN git clone https://github.com/vlang/v && cd v && make && ./v symlink + +#For the fastest vlang performance, use 0.1.29, where the unsafe changes were only restricted to pointer arithmetic +RUN wget -q https://github.com/vlang/v/releases/download/0.1.29/v_linux.zip && unzip -q v_linux.zip && cd v && chmod +x v && ./v symlink && cd .. 
&& rm -f v_linux.zip WORKDIR ${IROOT}/lang-server-backends/v/vweb #COPY vweb.v ${IROOT}/lang-server-backends/v/vweb/ diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-v-picov-raw-clibpqb-profiled-base.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-v-picov-raw-clibpqb-profiled-base.dockerfile new file mode 100644 index 00000000000..0c78eed712a --- /dev/null +++ b/frameworks/C++/ffead-cpp/ffead-cpp-v-picov-raw-clibpqb-profiled-base.dockerfile @@ -0,0 +1,98 @@ +FROM sumeetchhetri/ffead-cpp-sql-raw-clibpqb-profiled-base:6.0 +LABEL maintainer="Sumeet Chhetri" +LABEL version="6.0" +LABEL description="SQL Raw Custom libpq batch patched Base ffead-cpp-v-picov-profiled docker image with commit id - master" + +RUN rm -f /usr/local/lib/libffead-* /usr/local/lib/libte_benc* /usr/local/lib/libinter.so /usr/local/lib/libdinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0-sql/lib/libte-benchmark-um.so /usr/local/lib/libte-benchmark-um.so && \ + ln -s ${IROOT}/ffead-cpp-6.0-sql/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ + ln -s ${IROOT}/ffead-cpp-6.0-sql/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ + ln -s ${IROOT}/ffead-cpp-6.0-sql/lib/libinter.so /usr/local/lib/libinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0-sql/lib/libdinter.so /usr/local/lib/libdinter.so && \ + ldconfig + +RUN apt update -yqq && apt install -y git make && rm -rf /var/lib/apt/lists/* +#For Latest vlang, uncomment the below mentioned line, due to lot of new enhancements and unsafe block handling, vlnag has slowed down tremendously +#RUN git clone https://github.com/vlang/v && cd v && make && ./v symlink + +#For the fastest vlang performance, use 0.1.29, where the unsafe changes were only restricted to pointer arithmetic +RUN wget -q https://github.com/vlang/v/releases/download/0.1.29/v_linux.zip && unzip -q v_linux.zip && cd v && chmod +x v && ./v symlink && cd .. 
&& rm -f v_linux.zip + +WORKDIR /tmp +RUN mkdir postgresql + +COPY postgresql/* /tmp/postgresql/ + +#POSTGRESQL +WORKDIR /tmp/postgresql/ + +# prepare PostgreSQL APT repository +RUN apt-get -yqq update && apt-get -yqq install locales gnupg lsb-release + +RUN wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - +RUN echo "deb http://apt.postgresql.org/pub/repos/apt/ `lsb_release -cs`-pgdg main" | tee /etc/apt/sources.list.d/pgdg.list + +ENV PG_VERSION 13 +RUN locale-gen en_US.UTF-8 +ENV LANG en_US.UTF-8 +ENV LANGUAGE en_US:en +ENV LC_ALL en_US.UTF-8 +ENV DEBIAN_FRONTEND noninteractive + +# install postgresql on database machine +RUN apt-get -yqq update && apt-get -yqq install postgresql-13 postgresql-contrib-13 && rm -rf /var/lib/apt/lists/* + +# Make sure all the configuration files in main belong to postgres +RUN mv postgresql.conf /etc/postgresql/${PG_VERSION}/main/postgresql.conf +RUN mv pg_hba.conf /etc/postgresql/${PG_VERSION}/main/pg_hba.conf + +RUN chown -Rf postgres:postgres /etc/postgresql/${PG_VERSION}/main + +RUN cp -R -p /var/lib/postgresql/${PG_VERSION}/main /ssd/postgresql +RUN cp /etc/postgresql/${PG_VERSION}/main/postgresql.conf /ssd/postgresql +RUN mv 60-postgresql-shm.conf /etc/sysctl.d/60-postgresql-shm.conf + +RUN chown -Rf postgres:postgres /var/run/postgresql +RUN chmod 2777 /var/run/postgresql +RUN chown postgres:postgres /etc/sysctl.d/60-postgresql-shm.conf +RUN chown postgres:postgres create-postgres* +RUN chown -Rf postgres:postgres /ssd + +ENV PGDATA=/ssd/postgresql + +USER postgres + +# We have to wait for postgres to start before we can use the cli +RUN service postgresql start && \ + until psql -c "\q"; do sleep 1; done && \ + psql < create-postgres-database.sql && \ + psql -a hello_world < create-postgres.sql && \ + service postgresql stop +#POSTGRESQL + +USER root + +#WRK +WORKDIR /tmp/wrk +RUN apt-get -yqq update && apt-get -yqq install libluajit-5.1-dev libssl-dev luajit && rm -rf /var/lib/apt/lists/* && \ + curl -sL https://github.com/wg/wrk/archive/4.1.0.tar.gz | tar xz --strip-components=1 +ENV LDFLAGS="-O3 -march=native -flto" +ENV CFLAGS="-I /usr/include/luajit-2.1 $LDFLAGS" +RUN make WITH_LUAJIT=/usr WITH_OPENSSL=/usr -j "$(nproc)" +RUN cp wrk /usr/local/bin + +ENV name name +ENV server_host server_host +ENV levels levels +ENV duration duration +ENV max_concurrency max_concurrency +ENV max_threads max_threads +ENV pipeline pipeline +ENV accept accept +#WRK + +WORKDIR ${IROOT} + +COPY sql-v-picov-profiled-install.sh install_ffead-cpp-sql-raw-v-picov-profiled.sh ${IROOT}/ +RUN chmod 755 ${IROOT}/sql-v-picov-profiled-install.sh ${IROOT}/install_ffead-cpp-sql-raw-v-picov-profiled.sh +RUN ./sql-v-picov-profiled-install.sh diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-v-picov-raw-clibpqb-profiled.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-v-picov-raw-clibpqb-profiled.dockerfile new file mode 100644 index 00000000000..85e0ccd3ad4 --- /dev/null +++ b/frameworks/C++/ffead-cpp/ffead-cpp-v-picov-raw-clibpqb-profiled.dockerfile @@ -0,0 +1,7 @@ +FROM sumeetchhetri/ffead-cpp-v-picov-raw-clibpqb-profiled-base:6.0 + +ENV IROOT=/installs + +WORKDIR / + +CMD ./run_ffead.sh ffead-cpp-6.0-sql v-picov postgresql-raw memory diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-v-picov-raw-profiled-base.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-v-picov-raw-profiled-base.dockerfile new file mode 100644 index 00000000000..a5d8ab4bcd4 --- /dev/null +++ b/frameworks/C++/ffead-cpp/ffead-cpp-v-picov-raw-profiled-base.dockerfile @@ -0,0 
+1,98 @@ +FROM sumeetchhetri/ffead-cpp-sql-raw-profiled-base:6.0 +LABEL maintainer="Sumeet Chhetri" +LABEL version="6.0" +LABEL description="SQL Raw Base ffead-cpp-v-picov-profiled docker image with commit id - master" + +RUN rm -f /usr/local/lib/libffead-* /usr/local/lib/libte_benc* /usr/local/lib/libinter.so /usr/local/lib/libdinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0-sql/lib/libte-benchmark-um.so /usr/local/lib/libte-benchmark-um.so && \ + ln -s ${IROOT}/ffead-cpp-6.0-sql/lib/libffead-modules.so /usr/local/lib/libffead-modules.so && \ + ln -s ${IROOT}/ffead-cpp-6.0-sql/lib/libffead-framework.so /usr/local/lib/libffead-framework.so && \ + ln -s ${IROOT}/ffead-cpp-6.0-sql/lib/libinter.so /usr/local/lib/libinter.so && \ + ln -s ${IROOT}/ffead-cpp-6.0-sql/lib/libdinter.so /usr/local/lib/libdinter.so && \ + ldconfig + +RUN apt update -yqq && apt install -y git make && rm -rf /var/lib/apt/lists/* +#For Latest vlang, uncomment the below mentioned line, due to lot of new enhancements and unsafe block handling, vlnag has slowed down tremendously +#RUN git clone https://github.com/vlang/v && cd v && make && ./v symlink + +#For the fastest vlang performance, use 0.1.29, where the unsafe changes were only restricted to pointer arithmetic +RUN wget -q https://github.com/vlang/v/releases/download/0.1.29/v_linux.zip && unzip -q v_linux.zip && cd v && chmod +x v && ./v symlink && cd .. && rm -f v_linux.zip + +WORKDIR /tmp +RUN mkdir postgresql + +COPY postgresql/* /tmp/postgresql/ + +#POSTGRESQL +WORKDIR /tmp/postgresql/ + +# prepare PostgreSQL APT repository +RUN apt-get -yqq update && apt-get -yqq install locales gnupg lsb-release + +RUN wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - +RUN echo "deb http://apt.postgresql.org/pub/repos/apt/ `lsb_release -cs`-pgdg main" | tee /etc/apt/sources.list.d/pgdg.list + +ENV PG_VERSION 13 +RUN locale-gen en_US.UTF-8 +ENV LANG en_US.UTF-8 +ENV LANGUAGE en_US:en +ENV LC_ALL en_US.UTF-8 +ENV DEBIAN_FRONTEND noninteractive + +# install postgresql on database machine +RUN apt-get -yqq update && apt-get -yqq install postgresql-13 postgresql-contrib-13 && rm -rf /var/lib/apt/lists/* + +# Make sure all the configuration files in main belong to postgres +RUN mv postgresql.conf /etc/postgresql/${PG_VERSION}/main/postgresql.conf +RUN mv pg_hba.conf /etc/postgresql/${PG_VERSION}/main/pg_hba.conf + +RUN chown -Rf postgres:postgres /etc/postgresql/${PG_VERSION}/main + +RUN cp -R -p /var/lib/postgresql/${PG_VERSION}/main /ssd/postgresql +RUN cp /etc/postgresql/${PG_VERSION}/main/postgresql.conf /ssd/postgresql +RUN mv 60-postgresql-shm.conf /etc/sysctl.d/60-postgresql-shm.conf + +RUN chown -Rf postgres:postgres /var/run/postgresql +RUN chmod 2777 /var/run/postgresql +RUN chown postgres:postgres /etc/sysctl.d/60-postgresql-shm.conf +RUN chown postgres:postgres create-postgres* +RUN chown -Rf postgres:postgres /ssd + +ENV PGDATA=/ssd/postgresql + +USER postgres + +# We have to wait for postgres to start before we can use the cli +RUN service postgresql start && \ + until psql -c "\q"; do sleep 1; done && \ + psql < create-postgres-database.sql && \ + psql -a hello_world < create-postgres.sql && \ + service postgresql stop +#POSTGRESQL + +USER root + +#WRK +WORKDIR /tmp/wrk +RUN apt-get -yqq update && apt-get -yqq install libluajit-5.1-dev libssl-dev luajit && rm -rf /var/lib/apt/lists/* && \ + curl -sL https://github.com/wg/wrk/archive/4.1.0.tar.gz | tar xz --strip-components=1 +ENV LDFLAGS="-O3 -march=native -flto" +ENV CFLAGS="-I 
/usr/include/luajit-2.1 $LDFLAGS" +RUN make WITH_LUAJIT=/usr WITH_OPENSSL=/usr -j "$(nproc)" +RUN cp wrk /usr/local/bin + +ENV name name +ENV server_host server_host +ENV levels levels +ENV duration duration +ENV max_concurrency max_concurrency +ENV max_threads max_threads +ENV pipeline pipeline +ENV accept accept +#WRK + +WORKDIR ${IROOT} + +COPY sql-v-picov-profiled-install.sh install_ffead-cpp-sql-raw-v-picov-profiled.sh ${IROOT}/ +RUN chmod 755 ${IROOT}/sql-v-picov-profiled-install.sh ${IROOT}/install_ffead-cpp-sql-raw-v-picov-profiled.sh +RUN ./sql-v-picov-profiled-install.sh diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-v-picov-raw-profiled.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-v-picov-raw-profiled.dockerfile new file mode 100644 index 00000000000..9db68366eb5 --- /dev/null +++ b/frameworks/C++/ffead-cpp/ffead-cpp-v-picov-raw-profiled.dockerfile @@ -0,0 +1,7 @@ +FROM sumeetchhetri/ffead-cpp-v-picov-raw-profiled-base:6.0 + +ENV IROOT=/installs + +WORKDIR / + +CMD ./run_ffead.sh ffead-cpp-6.0-sql v-picov postgresql-raw memory diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-v-picov.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-v-picov.dockerfile index cfd87d991b9..1286bd5dff4 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-v-picov.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-v-picov.dockerfile @@ -1,9 +1,7 @@ -FROM sumeetchhetri/ffead-cpp-5.0-v-base:5.2 +FROM sumeetchhetri/ffead-cpp-v-base:6.0 ENV IROOT=/installs WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0 v-picov postgresql-raw memory +CMD ./run_ffead.sh ffead-cpp-6.0 v-picov postgresql-raw memory diff --git a/frameworks/C++/ffead-cpp/ffead-cpp-v-vweb.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp-v-vweb.dockerfile index 8695c071d1d..8514209f8ed 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp-v-vweb.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp-v-vweb.dockerfile @@ -1,9 +1,7 @@ -FROM sumeetchhetri/ffead-cpp-5.0-v-base:5.2 +FROM sumeetchhetri/ffead-cpp-v-base:6.0 ENV IROOT=/installs WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0 v-vweb +CMD ./run_ffead.sh ffead-cpp-6.0 v-vweb diff --git a/frameworks/C++/ffead-cpp/ffead-cpp.dockerfile b/frameworks/C++/ffead-cpp/ffead-cpp.dockerfile index b3c17a947f2..a1d1cd9435c 100644 --- a/frameworks/C++/ffead-cpp/ffead-cpp.dockerfile +++ b/frameworks/C++/ffead-cpp/ffead-cpp.dockerfile @@ -1,9 +1,7 @@ -FROM sumeetchhetri/ffead-cpp-5.0-base:5.2 +FROM sumeetchhetri/ffead-cpp-base:6.0 ENV IROOT=/installs WORKDIR / -EXPOSE 8080 - -CMD ./run_ffead.sh ffead-cpp-5.0 emb mongo redis +CMD ./run_ffead.sh ffead-cpp-6.0 emb mongo redis diff --git a/frameworks/C++/ffead-cpp/install_ffead-cpp-backends.sh b/frameworks/C++/ffead-cpp/install_ffead-cpp-backends.sh index da367087263..480cb0214b3 100644 --- a/frameworks/C++/ffead-cpp/install_ffead-cpp-backends.sh +++ b/frameworks/C++/ffead-cpp/install_ffead-cpp-backends.sh @@ -2,15 +2,6 @@ cd $IROOT -git clone https://github.com/sumeetchhetri/ffead-cpp -cd ffead-cpp -git checkout 4e98d8ba1a11505a0b7b450285b20ac0ad7a104f -b 5.0 -rm -rf .git -cd .. 
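The two profiled v-picov base images above install PostgreSQL 13 inside the build container only so that the PGO warm-up traffic has a live database to query. Because "service postgresql start" returns before the server is ready to accept connections, the images poll with psql before loading the seed data. A minimal standalone sketch of that wait-then-seed pattern, assuming it runs as the postgres user the way the Dockerfiles do:

#!/bin/bash
# bring the cluster up and block until it actually accepts connections
service postgresql start
until psql -c '\q' 2>/dev/null; do
    sleep 1   # retry once per second until the server socket is ready
done

# seed the benchmark schema and data only once the server is reachable
psql < create-postgres-database.sql
psql -a hello_world < create-postgres.sql

# stop the cluster again so the image layer is not committed with a running server
service postgresql stop
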
-mv ffead-cpp ffead-cpp-src -mv ffead-cpp-src/lang-server-backends ${IROOT}/ -cd $IROOT - CURR_TYPE="lithium" if [ "$CURR_TYPE" = "lithium" ] then @@ -27,7 +18,6 @@ then CINATRA_INC="-DCINATRA_INCLUDES=${IROOT}/cinatra/include" git clone https://github.com/sumeetchhetri/cinatra.git cd cinatra - git checkout b3871a074f6107f57acf42a15fa872d4076436ab -b works rm -rf .git fi @@ -40,7 +30,6 @@ then SRV_TYPE=SRV_DROGON git clone --recurse-submodules https://github.com/sumeetchhetri/drogon cd drogon - git checkout a10934f3f85f361cde58a891d3cf1f1df3a8ea8a -b works mkdir build cd build cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_CTL=off -DBUILD_EXAMPLES=off -DBUILD_ORM=off .. @@ -54,26 +43,26 @@ if [ "$CURR_TYPE" = "nghttp2" ] then apt install --no-install-recommends -y libjansson-dev libc-ares-dev libboost-all-dev cd $IROOT - wget -q https://github.com/nghttp2/nghttp2/releases/download/v1.41.0/nghttp2-1.41.0.tar.gz - tar xf nghttp2-1.41.0.tar.gz - cd nghttp2-1.41.0 + wget -q https://github.com/nghttp2/nghttp2/releases/download/v1.42.0/nghttp2-1.42.0.tar.gz + tar xf nghttp2-1.42.0.tar.gz + cd nghttp2-1.42.0 cmake -DENABLE_ASIO_LIB=on -GNinja . ninja install cd $IROOT - rm -rf nghttp2-1.41.0 nghttp2-1.41.0.tar.gz + rm -rf nghttp2-1.42.0 nghttp2-1.42.0.tar.gz fi CURR_TYPE="mongols" if [ "$CURR_TYPE" = "mongols" ] then cd $IROOT - wget -q https://github.com/webcpp/mongols/archive/release-1.8.4.9.tar.gz - tar xf release-1.8.4.9.tar.gz - cd mongols-release-1.8.4.9/ + wget -q https://github.com/webcpp/mongols/archive/release-1.8.4.12.tar.gz + tar xf release-1.8.4.12.tar.gz + cd mongols-release-1.8.4.12/ make clean && make -j4 && make install && ldconfig cp -rf inc/mongols/lib/* /usr/local/include/ cd $IROOT - rm -rf mongols-release-1.8.4.9/ release-1.8.4.9.tar.gz + rm -rf mongols-release-1.8.4.12/ release-1.8.4.12.tar.gz fi CURR_TYPE="uv-cpp" @@ -98,7 +87,8 @@ then git clone https://github.com/chronoxor/CppServer cd CppServer gil update - cd build + cd $IROOT/CppServer/modules/CppCommon/modules/fmt && git checkout b9ab5c8836bbffbe0a877f64d6faef8fbf4fd394 -b works + cd $IROOT/CppServer/build ./unix.sh cp $IROOT/CppServer/bin/libcppserver.a /usr/local/lib/ cp -rf $IROOT/CppServer/modules/asio/asio/include/* /usr/local/include/ @@ -111,4 +101,26 @@ then rm -rf CppServer fi +CURR_TYPE="lsquic-no" +if [ "$CURR_TYPE" = "lsquic" ] +then + apt install --no-install-recommends -y libunwind-dev golang libevent-dev + cd $IROOT + git clone https://boringssl.googlesource.com/boringssl + cd boringssl + git checkout b117a3a0b7bd11fe6ebd503ec6b45d6b910b41a1 + sed -i "s/-Werror//g" CMakeLists.txt + cmake -DCMAKE_BUILD_TYPE=Release . && make -j4 + cd $IROOT + git clone https://github.com/litespeedtech/lsquic.git + cd lsquic + git submodule init + git submodule update + cmake -DBORINGSSL_DIR=${IROOT}/boringssl . 
&& make install -j4 + cd $IROOT + rm -rf lsquic boringssl + apt remove -y golang + apt autoremove -y +fi + rm -rf /var/lib/apt/lists/* diff --git a/frameworks/C++/ffead-cpp/install_ffead-cpp-dependencies.sh b/frameworks/C++/ffead-cpp/install_ffead-cpp-dependencies.sh index 2da9120cdb3..313e0355592 100644 --- a/frameworks/C++/ffead-cpp/install_ffead-cpp-dependencies.sh +++ b/frameworks/C++/ffead-cpp/install_ffead-cpp-dependencies.sh @@ -37,21 +37,6 @@ wget -q https://downloads.mysql.com/archives/get/p/10/file/mysql-connector-odbc- dpkg -i mysql-connector-odbc-setup_8.0.21-1ubuntu20.04_amd64.deb rm -f *.deb -#mkdir -p /usr/lib/x86_64-linux-gnu/odbc -#wget -q https://downloads.mysql.com/archives/get/p/10/file/mysql-connector-odbc-8.0.19-linux-ubuntu18.04-x86-64bit.tar.gz -#tar xf mysql-connector-odbc-8.0.19-linux-ubuntu18.04-x86-64bit.tar.gz -#mv mysql-connector-odbc-8.0.19-linux-ubuntu18.04-x86-64bit/lib/libmyodbc8* /usr/lib/x86_64-linux-gnu/odbc/ -#mysql-connector-odbc-8.0.19-linux-ubuntu18.04-x86-64bit/bin/myodbc-installer -d -a -n "MySQL" -t "DRIVER=/usr/lib/x86_64-linux-gnu/odbc/libmyodbc8w.so;" -#rm -f mysql-connector-odbc-8.0.19-linux-ubuntu18.04-x86-64bit.tar.gz -#rm -rf mysql-connector-odbc-8.0.19-linux-ubuntu18.04-x86-64bit - -#wget -q https://cdn.mysql.com/archives/mysql-connector-odbc-5.3/mysql-connector-odbc-5.3.11-linux-ubuntu16.04-x86-64bit.tar.gz -#tar xf mysql-connector-odbc-5.3.11-linux-ubuntu16.04-x86-64bit.tar.gz -#mv mysql-connector-odbc-5.3.11-linux-ubuntu16.04-x86-64bit/lib/libmyodbc5* /usr/lib/x86_64-linux-gnu/odbc/ -#mysql-connector-odbc-5.3.11-linux-ubuntu16.04-x86-64bit/bin/myodbc-installer -d -a -n "MySQL" -t "DRIVER=/usr/lib/x86_64-linux-gnu/odbc/libmyodbc5w.so;" -#rm -f mysql-connector-odbc-5.3.11-linux-ubuntu16.04-x86-64bit.tar.gz -#rm -rf mysql-connector-odbc-5.3.11-linux-ubuntu16.04-x86-64bit - wget -q https://github.com/mongodb/mongo-c-driver/releases/download/1.4.2/mongo-c-driver-1.4.2.tar.gz tar xf mongo-c-driver-1.4.2.tar.gz rm -f mongo-c-driver-1.4.2.tar.gz diff --git a/frameworks/C++/ffead-cpp/install_ffead-cpp-framework.sh b/frameworks/C++/ffead-cpp/install_ffead-cpp-framework.sh index ca634c9d81e..3f08256e755 100644 --- a/frameworks/C++/ffead-cpp/install_ffead-cpp-framework.sh +++ b/frameworks/C++/ffead-cpp/install_ffead-cpp-framework.sh @@ -6,18 +6,26 @@ MAX_THREADS=$(( 3 * `nproc` / 2 )) WRIT_THREADS=$(( $MAX_THREADS / 3 )) SERV_THREADS=$(( $MAX_THREADS - $WRIT_THREADS )) +#git checkout e243bc096cd570cfee1edfecbcd91f4c4056fa1a -b 6.0 +git clone https://github.com/sumeetchhetri/ffead-cpp +cd ffead-cpp +rm -rf .git +cd .. 
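install_ffead-cpp-framework.sh, whose updated hunks continue below, sizes the ffead-cpp thread pools from the builder's core count: 1.5x nproc threads in total, a third of them reserved for the writer pool and the remainder for the server pool, with the values patched into resources/server.prop via sed. A short sketch of that arithmetic, with the numbers worked for an assumed 8-core machine:

#!/bin/bash
CORES=$(nproc)                                   # assume 8 cores for the worked example
MAX_THREADS=$(( 3 * CORES / 2 ))                 # 3 * 8 / 2  = 12
WRIT_THREADS=$(( MAX_THREADS / 3 ))              # 12 / 3     = 4   (writer pool)
SERV_THREADS=$(( MAX_THREADS - WRIT_THREADS ))   # 12 - 4     = 8   (server pool)

# push the computed sizes into the ffead-cpp server configuration
sed -i 's|THRD_PSIZ=6|THRD_PSIZ='${SERV_THREADS}'|g'     resources/server.prop
sed -i 's|W_THRD_PSIZ=2|W_THRD_PSIZ='${WRIT_THREADS}'|g' resources/server.prop
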
+mv ffead-cpp ffead-cpp-src +mv ffead-cpp-src/lang-server-backends ${IROOT}/ + cd $IROOT/ffead-cpp-src/ chmod 755 *.sh resources/*.sh rtdcf/autotools/*.sh -rm -rf web/te-benchmark-um -rm -rf web/te-benchmark-um-pq -rm -rf web/te-benchmark-um-mgr -rm -rf web/te-benchmark-um-pq-async +#rm -rf web/te-benchmark-um +#rm -rf web/te-benchmark-um-pq +#rm -rf web/te-benchmark-um-mgr +#rm -rf web/te-benchmark-um-pq-async mv ${IROOT}/server.sh script/ -mv ${IROOT}/te-benchmark-um web/ -mv ${IROOT}/te-benchmark-um-pq web/ -mv ${IROOT}/te-benchmark-um-mgr web/ -mv ${IROOT}/te-benchmark-um-pq-async web/ +#mv ${IROOT}/te-benchmark-um web/ +#mv ${IROOT}/te-benchmark-um-pq web/ +#mv ${IROOT}/te-benchmark-um-mgr web/ +#mv ${IROOT}/te-benchmark-um-pq-async web/ sed -i 's|THRD_PSIZ=6|THRD_PSIZ='${SERV_THREADS}'|g' resources/server.prop sed -i 's|W_THRD_PSIZ=2|W_THRD_PSIZ='${WRIT_THREADS}'|g' resources/server.prop sed -i 's|ENABLE_CRS=true|ENABLE_CRS=false|g' resources/server.prop @@ -56,8 +64,8 @@ sed -i 's|install(FILES ${PROJECT_BINARY_DIR}/web/default/libdefault${LIB_EXT} D sed -i 's|install(FILES ${PROJECT_BINARY_DIR}/web/flexApp/libflexApp${LIB_EXT} DESTINATION ${PROJECT_NAME}-bin/lib)||g' CMakeLists.txt sed -i 's|install(FILES ${PROJECT_BINARY_DIR}/web/oauthApp/liboauthApp${LIB_EXT} DESTINATION ${PROJECT_NAME}-bin/lib)||g' CMakeLists.txt sed -i 's|install(FILES ${PROJECT_BINARY_DIR}/web/markers/libmarkers${LIB_EXT} DESTINATION ${PROJECT_NAME}-bin/lib)||g' CMakeLists.txt -sed -i 's|install(FILES ${PROJECT_BINARY_DIR}/web/te-benchmark/libte_benchmark${LIB_EXT} DESTINATION ${PROJECT_NAME}-bin/lib)||g' CMakeLists.txt -sed -i 's|install(FILES ${PROJECT_BINARY_DIR}/web/peer-server/libpeer_server${LIB_EXT} DESTINATION ${PROJECT_NAME}-bin/lib)||g' CMakeLists.txt +sed -i 's|install(FILES ${PROJECT_BINARY_DIR}/web/te-benchmark/libte-benchmark${LIB_EXT} DESTINATION ${PROJECT_NAME}-bin/lib)||g' CMakeLists.txt +sed -i 's|install(FILES ${PROJECT_BINARY_DIR}/web/peer-server/libpeer-server${LIB_EXT} DESTINATION ${PROJECT_NAME}-bin/lib)||g' CMakeLists.txt sed -i 's|web/default/src/autotools/Makefile||g' configure.ac sed -i 's|web/flexApp/src/autotools/Makefile||g' configure.ac sed -i 's|web/oauthApp/src/autotools/Makefile||g' configure.ac @@ -83,31 +91,51 @@ rm -f /usr/local/lib/libte_benc* rm -f /usr/local/lib/libinter.so rm -f /usr/local/lib/libdinter.so -if [ ! -d "ffead-cpp-5.0-bin" ] +if [ ! -d "ffead-cpp-6.0-bin" ] then exit 1 fi -cd ffead-cpp-5.0-bin +cd ffead-cpp-6.0-bin #cache related dockerfiles will add the cache.xml accordingly whenever needed chmod 755 *.sh resources/*.sh rtdcf/autotools/*.sh ./server.sh & +COUNTER=0 while [ ! -f lib/libinter.so ] do - sleep 1 + sleep 1 + COUNTER=$((COUNTER+1)) + if [ "$COUNTER" = 120 ] + then + cat logs/jobs.log + echo "ffead-cpp exiting exiting due to failure...." + exit 1 + fi done +COUNTER=0 while [ ! 
-f lib/libdinter.so ] do - sleep 1 + sleep 1 + COUNTER=$((COUNTER+1)) + if [ "$COUNTER" = 120 ] + then + cat logs/jobs.log + echo "ffead-cpp exiting exiting due to failure....ddlib" + exit 1 + fi done +echo "ffead-cpp start successful" +sleep 5 +cd tests && rm -f test.csv && cp ${IROOT}/ffead-cpp-src/tests/test-te.csv test.csv && chmod +x *.sh && ./runTests.sh +echo "ffead-cpp normal shutdown" pkill ffead-cpp cd ${IROOT}/ffead-cpp-src/ -cp -rf ffead-cpp-5.0-bin ${IROOT}/ffead-cpp-5.0 -rm -rf ffead-cpp-5.0-bin +cp -rf ffead-cpp-6.0-bin ${IROOT}/ffead-cpp-6.0 +rm -rf ffead-cpp-6.0-bin mv ${IROOT}/nginxfc ${IROOT}/nginx-ffead-mongo -cd ${IROOT}/ffead-cpp-5.0 +cd ${IROOT}/ffead-cpp-6.0 chmod 755 *.sh resources/*.sh rtdcf/autotools/*.sh chmod 755 *.sh @@ -122,31 +150,51 @@ cp -f web/te-benchmark-um/sql-src/TeBkUmWorldsql.h web/te-benchmark-um/include/T cp -f web/te-benchmark-um/sql-src/TeBkUmWorldsql.cpp web/te-benchmark-um/src/TeBkUmWorld.cpp make install -j${MAX_THREADS} -if [ ! -d "ffead-cpp-5.0-bin" ] +if [ ! -d "ffead-cpp-6.0-bin" ] then exit 1 fi -cd ffead-cpp-5.0-bin +cd ffead-cpp-6.0-bin #cache related dockerfiles will add the cache.xml accordingly whenever needed chmod 755 *.sh resources/*.sh rtdcf/autotools/*.sh ./server.sh & +COUNTER=0 while [ ! -f lib/libinter.so ] do - sleep 1 + sleep 1 + COUNTER=$((COUNTER+1)) + if [ "$COUNTER" = 120 ] + then + cat logs/jobs.log + echo "ffead-cpp exiting exiting due to failure...." + exit 1 + fi done +COUNTER=0 while [ ! -f lib/libdinter.so ] do - sleep 1 + sleep 1 + COUNTER=$((COUNTER+1)) + if [ "$COUNTER" = 120 ] + then + cat logs/jobs.log + echo "ffead-cpp exiting exiting due to failure....ddlib" + exit 1 + fi done +echo "ffead-cpp start successful" +sleep 5 +cd tests && rm -f test.csv && cp ${IROOT}/ffead-cpp-src/tests/test-te.csv test.csv && chmod +x *.sh && ./runTests.sh +echo "ffead-cpp normal shutdown" pkill ffead-cpp cd ${IROOT}/ffead-cpp-src/ -cp -rf ffead-cpp-5.0-bin ${IROOT}/ffead-cpp-5.0-sql -rm -rf ffead-cpp-5.0-bin +cp -rf ffead-cpp-6.0-bin ${IROOT}/ffead-cpp-6.0-sql +rm -rf ffead-cpp-6.0-bin mv ${IROOT}/nginxfc ${IROOT}/nginx-ffead-sql -cd ${IROOT}/ffead-cpp-5.0-sql +cd ${IROOT}/ffead-cpp-6.0-sql chmod 755 *.sh resources/*.sh rtdcf/autotools/*.sh chmod 755 *.sh diff --git a/frameworks/C++/ffead-cpp/install_ffead-cpp-httpd.sh b/frameworks/C++/ffead-cpp/install_ffead-cpp-httpd.sh index 8afc78fa7df..bdc0372815c 100644 --- a/frameworks/C++/ffead-cpp/install_ffead-cpp-httpd.sh +++ b/frameworks/C++/ffead-cpp/install_ffead-cpp-httpd.sh @@ -2,7 +2,7 @@ cd $IROOT -#chown -R www-data:www-data ffead-cpp-5.0 +#chown -R www-data:www-data ffead-cpp-6.0 #wget -q https://archive.apache.org/dist/httpd/httpd-2.4.25.tar.gz #wget -q https://archive.apache.org/dist/apr/apr-1.5.2.tar.gz @@ -23,13 +23,13 @@ cd $IROOT #make install #cd ${IROOT} -sed -i 's|#define PACKAGE_BUGREPORT "sumeet.chhetri@gmail.com"| |g' ${IROOT}/ffead-cpp-5.0/include/AppDefines.h -sed -i 's|#define PACKAGE_NAME "ffead-cpp"| |g' ${IROOT}/ffead-cpp-5.0/include/AppDefines.h -sed -i 's|#define PACKAGE_STRING "ffead-cpp 5.0"| |g' ${IROOT}/ffead-cpp-5.0/include/AppDefines.h -sed -i 's|#define PACKAGE_TARNAME "ffead-cpp"| |g' ${IROOT}/ffead-cpp-5.0/include/AppDefines.h -sed -i 's|#define PACKAGE_VERSION "5.0"| |g' ${IROOT}/ffead-cpp-5.0/include/AppDefines.h +sed -i 's|#define PACKAGE_BUGREPORT "sumeet.chhetri@gmail.com"| |g' ${IROOT}/ffead-cpp-6.0/include/AppDefines.h +sed -i 's|#define PACKAGE_NAME "ffead-cpp"| |g' ${IROOT}/ffead-cpp-6.0/include/AppDefines.h +sed -i 's|#define 
PACKAGE_STRING "ffead-cpp 6.0"| |g' ${IROOT}/ffead-cpp-6.0/include/AppDefines.h +sed -i 's|#define PACKAGE_TARNAME "ffead-cpp"| |g' ${IROOT}/ffead-cpp-6.0/include/AppDefines.h +sed -i 's|#define PACKAGE_VERSION "6.0"| |g' ${IROOT}/ffead-cpp-6.0/include/AppDefines.h -FFEADROOT=${IROOT}/ffead-cpp-5.0 +FFEADROOT=${IROOT}/ffead-cpp-6.0 ETROOT=${FFEADROOT//\//\\/} EIROOT=${IROOT//\//\\/} diff --git a/frameworks/C++/ffead-cpp/install_ffead-cpp-nginx.sh b/frameworks/C++/ffead-cpp/install_ffead-cpp-nginx.sh index 135576f34ea..6922d4bc131 100644 --- a/frameworks/C++/ffead-cpp/install_ffead-cpp-nginx.sh +++ b/frameworks/C++/ffead-cpp/install_ffead-cpp-nginx.sh @@ -3,6 +3,6 @@ cd $IROOT cp ${IROOT}/ffead-cpp-src/modules/nginx_mod_ffeadcpp/nginx.conf ${IROOT}/nginx-ffead-mongo/conf/ -sed -i 's|FFEAD_PATH|'${IROOT}/ffead-cpp-5.0'|g' ${IROOT}/nginx-ffead-mongo/conf/nginx.conf +sed -i 's|FFEAD_PATH|'${IROOT}/ffead-cpp-6.0'|g' ${IROOT}/nginx-ffead-mongo/conf/nginx.conf cp ${IROOT}/ffead-cpp-src/modules/nginx_mod_ffeadcpp/nginx.conf ${IROOT}/nginx-ffead-sql/conf/ -sed -i 's|FFEAD_PATH|'${IROOT}/ffead-cpp-5.0-sql'|g' ${IROOT}/nginx-ffead-sql/conf/nginx.conf +sed -i 's|FFEAD_PATH|'${IROOT}/ffead-cpp-6.0-sql'|g' ${IROOT}/nginx-ffead-sql/conf/nginx.conf diff --git a/frameworks/C++/ffead-cpp/install_ffead-cpp-sql-raw-profiled.sh b/frameworks/C++/ffead-cpp/install_ffead-cpp-sql-raw-profiled.sh index 349e7344355..11bbe4d026c 100644 --- a/frameworks/C++/ffead-cpp/install_ffead-cpp-sql-raw-profiled.sh +++ b/frameworks/C++/ffead-cpp/install_ffead-cpp-sql-raw-profiled.sh @@ -22,7 +22,7 @@ do then cat ffead.log cat logs/jobs.log - echo "exiting...." + echo "ffead-cpp exiting exiting due to failure...." exit 1 fi done @@ -35,10 +35,15 @@ do then cat ffead.log cat logs/jobs.log - echo "exiting....dlib" + echo "ffead-cpp exiting exiting due to failure....dlib" exit 1 fi done +echo "ffead-cpp start successful" +sleep 5 +#cd tests && chmod +x *.sh && ./runTests.sh +#echo "ffead-cpp normal shutdown" +#cd - rm -f serv.ctrl pkill ffead-cpp @@ -47,6 +52,7 @@ service postgresql start #For profiling/benchmarking sed -i 's|EVH_SINGLE=false|EVH_SINGLE=true|g' resources/server.prop +#sed -i 's|LOGGING_ENABLED=false|LOGGING_ENABLED=true|g' resources/server.prop nohup bash -c "./server.sh > ffead.log &" sleep 10 echo "ffead-cpp with sql-raw support launched" diff --git a/frameworks/C++/ffead-cpp/install_ffead-cpp-sql-raw-v-picov-profiled.sh b/frameworks/C++/ffead-cpp/install_ffead-cpp-sql-raw-v-picov-profiled.sh new file mode 100644 index 00000000000..e2281d23f38 --- /dev/null +++ b/frameworks/C++/ffead-cpp/install_ffead-cpp-sql-raw-v-picov-profiled.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +export FFEAD_CPP_PATH=${IROOT}/ffead-cpp-6.0-sql +export LD_LIBRARY_PATH=${IROOT}/:${IROOT}/lib:${FFEAD_CPP_PATH}/lib:/usr/local/lib:$LD_LIBRARY_PATH + +cd $IROOT/lang-server-backends/v/pico.v + +cp -f ${FFEAD_CPP_PATH}/web/te-benchmark-um-pq/config/cachememory.xml ${FFEAD_CPP_PATH}/web/te-benchmark-um-pq/config/cache.xml +sed -i 's|EVH_SINGLE=false|EVH_SINGLE=true|g' ${FFEAD_CPP_PATH}/resources/server.prop +#sed -i 's|LOGGING_ENABLED=false|LOGGING_ENABLED=true|g' ${FFEAD_CPP_PATH}/resources/server.prop +nohup bash -c "./main --server_dir=$FFEAD_CPP_PATH --server_port=8080 > ffead.log &" +sleep 10 +echo "ffead-cpp-v-picov with sql-raw support launched" +wrk -H 'Host: localhost' -H 'Accept: application/json,text/html;q=0.9,application/xhtml+xml;q=0.9,application/xml;q=0.8,*/*;q=0.7' \ + -H 'Connection: keep-alive' --latency -d 5 -c 256 --timeout 8 -t 2 
"http://localhost:8080/plaintext" +wrk -H 'Host: localhost' -H 'Accept: application/json,text/html;q=0.9,application/xhtml+xml;q=0.9,application/xml;q=0.8,*/*;q=0.7' \ + -H 'Connection: keep-alive' --latency -d 5 -c 256 --timeout 8 -t 2 "http://localhost:8080/te-benchmark-um-pq/json" +wrk -H 'Host: localhost' -H 'Accept: application/json,text/html;q=0.9,application/xhtml+xml;q=0.9,application/xml;q=0.8,*/*;q=0.7' \ + -H 'Connection: keep-alive' --latency -d 5 -c 256 --timeout 8 -t 2 "http://localhost:8080/te-benchmark-um-pq/fortunes" +wrk -H 'Host: localhost' -H 'Accept: application/json,text/html;q=0.9,application/xhtml+xml;q=0.9,application/xml;q=0.8,*/*;q=0.7' \ + -H 'Connection: keep-alive' --latency -d 5 -c 512 --timeout 8 -t 2 "http://localhost:8080/te-benchmark-um-pq/db" +wrk -H 'Host: localhost' -H 'Accept: application/json,text/html;q=0.9,application/xhtml+xml;q=0.9,application/xml;q=0.8,*/*;q=0.7' \ + -H 'Connection: keep-alive' --latency -d 5 -c 512 --timeout 8 -t 2 "http://localhost:8080/te-benchmark-um-pq/queries?queries=20" +wrk -H 'Host: localhost' -H 'Accept: application/json,text/html;q=0.9,application/xhtml+xml;q=0.9,application/xml;q=0.8,*/*;q=0.7' \ + -H 'Connection: keep-alive' --latency -d 5 -c 512 --timeout 8 -t 2 "http://localhost:8080/te-benchmark-um-pq/updates?queries=20" +wrk -H 'Host: localhost' -H 'Accept: application/json,text/html;q=0.9,application/xhtml+xml;q=0.9,application/xml;q=0.8,*/*;q=0.7' \ + -H 'Connection: keep-alive' --latency -d 5 -c 512 --timeout 8 -t 2 "http://localhost:8080/te-benchmark-um-pq/cached-worlds?count=20" +echo "normal shutdown" +pkill main + +cd / + +exit 0 diff --git a/frameworks/C++/ffead-cpp/run_ffead.sh b/frameworks/C++/ffead-cpp/run_ffead.sh index 79b622b3694..3fd9afca80e 100644 --- a/frameworks/C++/ffead-cpp/run_ffead.sh +++ b/frameworks/C++/ffead-cpp/run_ffead.sh @@ -7,7 +7,10 @@ rm -f /usr/local/lib/libdinter.so export FFEAD_CPP_PATH=${IROOT}/$1 -ln -s ${FFEAD_CPP_PATH}/lib/libte_benchmark_um.so /usr/local/lib/libte_benchmark_um.so +ln -s ${FFEAD_CPP_PATH}/lib/libte-benchmark-um.so /usr/local/lib/libte-benchmark-um.so +ln -s ${FFEAD_CPP_PATH}/lib/libte-benchmark-um-pq.so /usr/local/lib/libte-benchmark-um-pq.so +ln -s ${FFEAD_CPP_PATH}/lib/libte-benchmark-um-mgr.so /usr/local/lib/libte-benchmark-um-mgr.so +ln -s ${FFEAD_CPP_PATH}/lib/libte-benchmark-um-pq-async.so /usr/local/lib/libte-benchmark-um-pq-async.so ln -s ${FFEAD_CPP_PATH}/lib/libffead-modules.so /usr/local/lib/libffead-modules.so ln -s ${FFEAD_CPP_PATH}/lib/libffead-framework.so /usr/local/lib/libffead-framework.so ln -s ${FFEAD_CPP_PATH}/lib/libinter.so /usr/local/lib/libinter.so @@ -117,8 +120,8 @@ elif [ "$2" = "apache" ] then if [ "$3" = "mysql" ] || [ "$3" = "postgresql" ] then - sed -i 's|/installs/ffead-cpp-5.0|'/installs/ffead-cpp-5.0-sql'|g' /etc/apache2/apache2.conf - sed -i 's|/installs/ffead-cpp-5.0|'/installs/ffead-cpp-5.0-sql'|g' /etc/apache2/sites-enabled/000-default.conf /etc/apache2/sites-enabled/ffead-site.conf + sed -i 's|/installs/ffead-cpp-6.0|'/installs/ffead-cpp-6.0-sql'|g' /etc/apache2/apache2.conf + sed -i 's|/installs/ffead-cpp-6.0|'/installs/ffead-cpp-6.0-sql'|g' /etc/apache2/sites-enabled/000-default.conf /etc/apache2/sites-enabled/ffead-site.conf fi sed -i 's|30|3|g' web/te-benchmark-um/config/sdorm.xml sed -i 's|10|2|g' web/te-benchmark-um/config/cache.xml @@ -237,6 +240,10 @@ then cd ${IROOT} java -Xmx2G -Xms2G -server -XX:+UseNUMA -XX:+UseParallelGC -XX:+AggressiveOpts \ -jar wizzardo-ffead-cpp-all-1.0.jar $FFEAD_CPP_PATH 8080 
env=prod +elif [ "$2" = "seastar" ] +then + cd ${IROOT}/lang-server-backends/c++/seastar + ./ffead-cpp-seastar --port=8080 --address=0.0.0.0 --fcpdir=${FFEAD_CPP_PATH} -c$(nproc) fi wait diff --git a/frameworks/C++/ffead-cpp/sql-async-profiled-install-clang-dbg.sh b/frameworks/C++/ffead-cpp/sql-async-profiled-install-clang-dbg.sh new file mode 100644 index 00000000000..3b978e368a0 --- /dev/null +++ b/frameworks/C++/ffead-cpp/sql-async-profiled-install-clang-dbg.sh @@ -0,0 +1,21 @@ +cd $IROOT/ffead-cpp-src/ + +rm -rf build +mkdir build +cd build +CC=/usr/bin/clang CXX=/usr/bin/clang++ cmake -DSRV_EMB=on -DMOD_REDIS=on -DDEBUG=on .. +make install && mv $IROOT/ffead-cpp-src/ffead-cpp-6.0-bin $IROOT/ffead-cpp-sql-raw + +#Start postgresql +service postgresql stop +#For profiling/benchmarking + +cd $IROOT/ +sed -i 's|cmake |CC=/usr/bin/clang CXX=/usr/bin/clang++ cmake |g' $IROOT/ffead-cpp-sql-raw/resources/rundyn-automake.sh +#sed -i 's|-fprofile-instr-generate=/tmp/cprof.prof|-fprofile-instr-generate=/tmp/cprofdi.prof|g' $IROOT/ffead-cpp-sql-raw/rtdcf/CMakeLists.txt.template +apt update -yqq && apt install -yqq vim gdb net-tools telnet iputils-ping +./install_ffead-cpp-sql-raw-profiled.sh async + +#mv $IROOT/ffead-cpp-sql-raw $IROOT/ffead-cpp-6.0-sql +#sed -i 's|localhost|tfb-database|g' $IROOT/ffead-cpp-6.0-sql/web/te-benchmark-um-pq-async/config/sdorm.xml + diff --git a/frameworks/C++/ffead-cpp/sql-async-profiled-install-clang.sh b/frameworks/C++/ffead-cpp/sql-async-profiled-install-clang.sh index f406ebd3395..1e94a15b723 100644 --- a/frameworks/C++/ffead-cpp/sql-async-profiled-install-clang.sh +++ b/frameworks/C++/ffead-cpp/sql-async-profiled-install-clang.sh @@ -1,47 +1,17 @@ -mkdir /tmp/profile-data - -rm -rf $IROOT/ffead-cpp-5.0-sql - -if [ "$1" = "batch" ] -then - apt remove -yqq libpq-dev - apt autoremove -yqq - apt update && apt install -y bison flex libreadline-dev - cd /tmp - wget -q https://github.com/an-tao/postgres/archive/batch_mode_ubuntu.tar.gz - tar -xzf batch_mode_ubuntu.tar.gz - cd postgres-batch_mode_ubuntu - ./configure --prefix=/usr CFLAGS='-O2 -pipe -march=native' - make && make install -fi - -apt update -yqq && apt install -yqq clang - cd $IROOT/ffead-cpp-src/ -rm -rf CMakeCache.txt CMakeFiles -rm -rf web/te-benchmark-um web/te-benchmark-um-mgr web/te-benchmark-um-pq - -sed -i 's|add_subdirectory(${PROJECT_SOURCE_DIR}/web/te-benchmark-um)||g' CMakeLists.txt -sed -i 's|add_subdirectory(${PROJECT_SOURCE_DIR}/web/te-benchmark-um-mgr)||g' CMakeLists.txt -sed -i 's|add_subdirectory(${PROJECT_SOURCE_DIR}/web/te-benchmark-um-pq)||g' CMakeLists.txt -sed -i 's|install(FILES ${PROJECT_BINARY_DIR}/web/te-benchmark-um/libte_benchmark_um${LIB_EXT} DESTINATION ${PROJECT_NAME}-bin/lib)||g' CMakeLists.txt -sed -i 's|install(FILES ${PROJECT_BINARY_DIR}/web/te-benchmark-um-mgr/libte_benchmark_um_mgr${LIB_EXT} DESTINATION ${PROJECT_NAME}-bin/lib)||g' CMakeLists.txt -sed -i 's|install(FILES ${PROJECT_BINARY_DIR}/web/te-benchmark-um-pq/libte_benchmark_um_pq${LIB_EXT} DESTINATION ${PROJECT_NAME}-bin/lib)||g' CMakeLists.txt - -sed -i 's|tfb-database|localhost|g' $IROOT/ffead-cpp-src/web/te-benchmark-um-pq-async/config/sdorm.xml rm -rf build mkdir build cd build CC=/usr/bin/clang CXX=/usr/bin/clang++ CXXFLAGS="-march=native -flto -fprofile-instr-generate=/tmp/cprof.prof" cmake -DSRV_EMB=on -DMOD_REDIS=on .. 
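The clang install scripts here implement a two-pass profile-guided build; the first instrumented configure is shown above, and the profile merge plus the -fprofile-instr-use rebuild follow in the next hunks. Condensed into one sketch (the warm-up step is only indicated by a comment; in the repo it is install_ffead-cpp-sql-raw-profiled.sh driving wrk at the instrumented server):

#!/bin/bash
SRC=$IROOT/ffead-cpp-src

# pass 1: instrumented build; the binaries write their counters to /tmp/cprof.prof at exit
cd $SRC && rm -rf build && mkdir build && cd build
CC=/usr/bin/clang CXX=/usr/bin/clang++ \
  CXXFLAGS="-march=native -flto -fprofile-instr-generate=/tmp/cprof.prof" \
  cmake -DSRV_EMB=on -DMOD_REDIS=on ..
make install

# ... run a representative workload against the instrumented binaries ...

# merge the raw profile, then pass 2: rebuild from a clean build directory using the merged profile
llvm-profdata-10 merge -output=/tmp/cprof.pgo /tmp/cprof.prof
cd $SRC && rm -rf build && mkdir build && cd build
CC=/usr/bin/clang CXX=/usr/bin/clang++ \
  CXXFLAGS="-march=native -flto -fprofile-instr-use=/tmp/cprof.pgo" \
  cmake -DSRV_EMB=on -DMOD_REDIS=on ..
make install
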
-make install && mv $IROOT/ffead-cpp-src/ffead-cpp-5.0-bin $IROOT/ffead-cpp-sql-raw +make install && mv $IROOT/ffead-cpp-src/ffead-cpp-6.0-bin $IROOT/ffead-cpp-sql-raw #Start postgresql service postgresql stop #For profiling/benchmarking cd $IROOT/ -sed -i 's|cmake .|CC=/usr/bin/clang CXX=/usr/bin/clang++ cmake .|g' $IROOT/ffead-cpp-sql-raw/resources/rundyn-automake.sh +sed -i 's|cmake |CC=/usr/bin/clang CXX=/usr/bin/clang++ cmake |g' $IROOT/ffead-cpp-sql-raw/resources/rundyn-automake.sh #sed -i 's|-fprofile-instr-generate=/tmp/cprof.prof|-fprofile-instr-generate=/tmp/cprofdi.prof|g' $IROOT/ffead-cpp-sql-raw/rtdcf/CMakeLists.txt.template ./install_ffead-cpp-sql-raw-profiled.sh async rm -rf $IROOT/ffead-cpp-sql-raw @@ -53,20 +23,20 @@ cd build llvm-profdata-10 merge -output=/tmp/cprof.pgo /tmp/cprof.prof #llvm-profdata-10 merge -output=/tmp/cprofdi.pgo /tmp/cprofdi.prof ls -ltr /tmp/cprof* -CC=/usr/bin/clang CXX=/usr/bin/clang++ CXXFLAGS="-march=native -flto -fprofile-instr-use=/tmp/cprof.pgo" cmake -DSRV_EMB=on -DMOD_MEMCACHED=on -DMOD_REDIS=on -DMOD_SDORM_MONGO=on .. -make install && mv $IROOT/ffead-cpp-src/ffead-cpp-5.0-bin $IROOT/ffead-cpp-sql-raw +CC=/usr/bin/clang CXX=/usr/bin/clang++ CXXFLAGS="-march=native -flto -fprofile-instr-use=/tmp/cprof.pgo" cmake -DSRV_EMB=on -DMOD_REDIS=on .. +make install && mv $IROOT/ffead-cpp-src/ffead-cpp-6.0-bin $IROOT/ffead-cpp-sql-raw #Start postgresql service postgresql stop #For profiling/benchmarking cd $IROOT/ -sed -i 's|cmake .|CC=/usr/bin/clang CXX=/usr/bin/clang++ cmake .|g' $IROOT/ffead-cpp-sql-raw/resources/rundyn-automake.sh +sed -i 's|cmake |CC=/usr/bin/clang CXX=/usr/bin/clang++ cmake |g' $IROOT/ffead-cpp-sql-raw/resources/rundyn-automake.sh #sed -i 's|-fprofile-instr-use=/tmp/cprof.pgo|-fprofile-instr-use=/tmp/cprofdi.pgo|g' $IROOT/ffead-cpp-sql-raw/rtdcf/CMakeLists.txt.template ./install_ffead-cpp-sql-raw-profiled.sh async -mv $IROOT/ffead-cpp-sql-raw $IROOT/ffead-cpp-5.0-sql +mv $IROOT/ffead-cpp-sql-raw $IROOT/ffead-cpp-6.0-sql -sed -i 's|localhost|tfb-database|g' $IROOT/ffead-cpp-5.0-sql/web/te-benchmark-um-pq-async/config/sdorm.xml +sed -i 's|localhost|tfb-database|g' $IROOT/ffead-cpp-6.0-sql/web/te-benchmark-um-pq-async/config/sdorm.xml apt remove -yqq postgresql-13 postgresql-contrib-13 gnupg lsb-release apt autoremove -yqq diff --git a/frameworks/C++/ffead-cpp/sql-async-profiled-install.sh b/frameworks/C++/ffead-cpp/sql-async-profiled-install.sh index d05630909e8..df1a2d1847d 100644 --- a/frameworks/C++/ffead-cpp/sql-async-profiled-install.sh +++ b/frameworks/C++/ffead-cpp/sql-async-profiled-install.sh @@ -1,45 +1,17 @@ -mkdir /tmp/profile-data - -rm -rf $IROOT/ffead-cpp-5.0-sql - -if [ "$1" = "batch" ] -then - apt remove -yqq libpq-dev - apt autoremove -yqq - apt update && apt install -y bison flex libreadline-dev - cd /tmp - wget -q https://github.com/an-tao/postgres/archive/batch_mode_ubuntu.tar.gz - tar -xzf batch_mode_ubuntu.tar.gz - cd postgres-batch_mode_ubuntu - ./configure --prefix=/usr CFLAGS='-O2 -pipe -march=native' - make && make install -fi - cd $IROOT/ffead-cpp-src/ -rm -rf CMakeCache.txt CMakeFiles -rm -rf web/te-benchmark-um web/te-benchmark-um-mgr web/te-benchmark-um-pq - -sed -i 's|add_subdirectory(${PROJECT_SOURCE_DIR}/web/te-benchmark-um)||g' CMakeLists.txt -sed -i 's|add_subdirectory(${PROJECT_SOURCE_DIR}/web/te-benchmark-um-mgr)||g' CMakeLists.txt -sed -i 's|add_subdirectory(${PROJECT_SOURCE_DIR}/web/te-benchmark-um-pq)||g' CMakeLists.txt -sed -i 's|install(FILES 
${PROJECT_BINARY_DIR}/web/te-benchmark-um/libte_benchmark_um${LIB_EXT} DESTINATION ${PROJECT_NAME}-bin/lib)||g' CMakeLists.txt -sed -i 's|install(FILES ${PROJECT_BINARY_DIR}/web/te-benchmark-um-mgr/libte_benchmark_um_mgr${LIB_EXT} DESTINATION ${PROJECT_NAME}-bin/lib)||g' CMakeLists.txt -sed -i 's|install(FILES ${PROJECT_BINARY_DIR}/web/te-benchmark-um-pq/libte_benchmark_um_pq${LIB_EXT} DESTINATION ${PROJECT_NAME}-bin/lib)||g' CMakeLists.txt - -sed -i 's|tfb-database|localhost|g' $IROOT/ffead-cpp-src/web/te-benchmark-um-pq-async/config/sdorm.xml rm -rf build mkdir build cd build CXXFLAGS="-march=native -flto -fprofile-dir=/tmp/profile-data -fprofile-generate" cmake -DSRV_EMB=on -DMOD_REDIS=on .. -make install && mv $IROOT/ffead-cpp-src/ffead-cpp-5.0-bin $IROOT/ffead-cpp-sql-raw +make install && mv $IROOT/ffead-cpp-src/ffead-cpp-6.0-bin $IROOT/ffead-cpp-sql-raw #Start postgresql service postgresql stop #For profiling/benchmarking cd $IROOT/ -#sed -i 's|cmake .|cmake -DCMAKE_EXE_LINKER_FLAGS="-fprofile-dir=/tmp/profile-data -fprofile-generate" -DCMAKE_CXX_FLAGS="-march=native -fprofile-dir=/tmp/profile-data -fprofile-generate" .|g' $IROOT/ffead-cpp-sql-raw/resources/rundyn-automake.sh +#sed -i 's|cmake |cmake -DCMAKE_EXE_LINKER_FLAGS="-fprofile-dir=/tmp/profile-data -fprofile-generate" -DCMAKE_CXX_FLAGS="-march=native -fprofile-dir=/tmp/profile-data -fprofile-generate" |g' $IROOT/ffead-cpp-sql-raw/resources/rundyn-automake.sh ./install_ffead-cpp-sql-raw-profiled.sh async rm -rf $IROOT/ffead-cpp-sql-raw @@ -47,19 +19,19 @@ cd $IROOT/ffead-cpp-src rm -rf build mkdir build cd build -CXXFLAGS="-march=native -flto -fprofile-dir=/tmp/profile-data -fprofile-use=/tmp/profile-data -fprofile-correction" cmake -DSRV_EMB=on -DMOD_MEMCACHED=on -DMOD_REDIS=on -DMOD_SDORM_MONGO=on .. -make install && mv $IROOT/ffead-cpp-src/ffead-cpp-5.0-bin $IROOT/ffead-cpp-sql-raw +CXXFLAGS="-march=native -flto -fprofile-dir=/tmp/profile-data -fprofile-use=/tmp/profile-data -fprofile-correction" cmake -DSRV_EMB=on -DMOD_REDIS=on .. 
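sql-async-profiled-install.sh and sql-profiled-install.sh are the gcc counterparts of the same idea. The differences are confined to the profiling flags: gcc drops its counters as .gcda files under the directory named by -fprofile-dir, there is no separate merge tool in the flow, and -fprofile-correction lets the second pass tolerate the slightly inconsistent counters a multi-threaded server produces. The shape of the cycle, again with the warm-up step elided and each pass run from a fresh build directory as in the scripts:

#!/bin/bash
mkdir -p /tmp/profile-data

# pass 1: gcc instrumentation; .gcda counter files accumulate under /tmp/profile-data
CXXFLAGS="-march=native -flto -fprofile-dir=/tmp/profile-data -fprofile-generate" \
  cmake -DSRV_EMB=on -DMOD_REDIS=on ..
make install
# ... run the warm-up workload ...

# pass 2: no merge step needed, gcc consumes the counter files directly
CXXFLAGS="-march=native -flto -fprofile-dir=/tmp/profile-data -fprofile-use=/tmp/profile-data -fprofile-correction" \
  cmake -DSRV_EMB=on -DMOD_REDIS=on ..
make install
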
+make install && mv $IROOT/ffead-cpp-src/ffead-cpp-6.0-bin $IROOT/ffead-cpp-sql-raw #Start postgresql service postgresql stop #For profiling/benchmarking cd $IROOT/ -#sed -i 's|cmake .|CXXFLAGS="-march=native -fprofile-dir=/tmp/profile-data -fprofile-use -fprofile-correction" cmake .|g' $IROOT/ffead-cpp-sql-raw/resources/rundyn-automake.sh +#sed -i 's|cmake |CXXFLAGS="-march=native -fprofile-dir=/tmp/profile-data -fprofile-use -fprofile-correction" cmake |g' $IROOT/ffead-cpp-sql-raw/resources/rundyn-automake.sh ./install_ffead-cpp-sql-raw-profiled.sh async -mv $IROOT/ffead-cpp-sql-raw $IROOT/ffead-cpp-5.0-sql +mv $IROOT/ffead-cpp-sql-raw $IROOT/ffead-cpp-6.0-sql -sed -i 's|localhost|tfb-database|g' $IROOT/ffead-cpp-5.0-sql/web/te-benchmark-um-pq-async/config/sdorm.xml +sed -i 's|localhost|tfb-database|g' $IROOT/ffead-cpp-6.0-sql/web/te-benchmark-um-pq-async/config/sdorm.xml apt remove -yqq postgresql-13 postgresql-contrib-13 gnupg lsb-release apt autoremove -yqq diff --git a/frameworks/C++/ffead-cpp/sql-profiled-install-clang.sh b/frameworks/C++/ffead-cpp/sql-profiled-install-clang.sh index 0279b761354..f7afae44f6e 100644 --- a/frameworks/C++/ffead-cpp/sql-profiled-install-clang.sh +++ b/frameworks/C++/ffead-cpp/sql-profiled-install-clang.sh @@ -1,47 +1,17 @@ -mkdir /tmp/profile-data - -rm -rf $IROOT/ffead-cpp-5.0-sql - -if [ "$1" = "batch" ] -then - apt remove -yqq libpq-dev - apt autoremove -yqq - apt update && apt install -y bison flex libreadline-dev - cd /tmp - wget -q https://github.com/an-tao/postgres/archive/batch_mode_ubuntu.tar.gz - tar -xzf batch_mode_ubuntu.tar.gz - cd postgres-batch_mode_ubuntu - ./configure --prefix=/usr CFLAGS='-O2 -pipe -march=native' - make && make install -fi - -apt update -yqq && apt install -yqq clang - cd $IROOT/ffead-cpp-src/ -rm -rf CMakeCache.txt CMakeFiles -rm -rf web/te-benchmark-um web/te-benchmark-um-mgr web/te-benchmark-um-pq-async - -sed -i 's|add_subdirectory(${PROJECT_SOURCE_DIR}/web/te-benchmark-um)||g' CMakeLists.txt -sed -i 's|add_subdirectory(${PROJECT_SOURCE_DIR}/web/te-benchmark-um-mgr)||g' CMakeLists.txt -sed -i 's|add_subdirectory(${PROJECT_SOURCE_DIR}/web/te-benchmark-um-pq-async)||g' CMakeLists.txt -sed -i 's|install(FILES ${PROJECT_BINARY_DIR}/web/te-benchmark-um/libte_benchmark_um${LIB_EXT} DESTINATION ${PROJECT_NAME}-bin/lib)||g' CMakeLists.txt -sed -i 's|install(FILES ${PROJECT_BINARY_DIR}/web/te-benchmark-um-mgr/libte_benchmark_um_mgr${LIB_EXT} DESTINATION ${PROJECT_NAME}-bin/lib)||g' CMakeLists.txt -sed -i 's|install(FILES ${PROJECT_BINARY_DIR}/web/te-benchmark-um-pq-async/libte_benchmark_um_pq_async${LIB_EXT} DESTINATION ${PROJECT_NAME}-bin/lib)||g' CMakeLists.txt - -sed -i 's|tfb-database|localhost|g' $IROOT/ffead-cpp-src/web/te-benchmark-um-pq/config/sdorm.xml rm -rf build mkdir build cd build CC=/usr/bin/clang CXX=/usr/bin/clang++ CXXFLAGS="-march=native -flto -fprofile-instr-generate=/tmp/cprof.prof" cmake -DSRV_EMB=on -DMOD_REDIS=on .. 
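Both clang variants also patch resources/rundyn-automake.sh so that ffead-cpp's runtime dynamic-compilation step picks up the same compiler as the main build. Note the search pattern changing from "cmake ." to "cmake " (trailing space), which looks intended to match any cmake invocation in that script regardless of its arguments. A quick illustration of the substitution on a hypothetical line standing in for the real script contents:

#!/bin/bash
# hypothetical stand-in for a line inside resources/rundyn-automake.sh
echo 'cmake -DSRV_EMB=on ..' > /tmp/rundyn-automake.sh

sed -i 's|cmake |CC=/usr/bin/clang CXX=/usr/bin/clang++ cmake |g' /tmp/rundyn-automake.sh

cat /tmp/rundyn-automake.sh
# prints: CC=/usr/bin/clang CXX=/usr/bin/clang++ cmake -DSRV_EMB=on ..
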
-make install && mv $IROOT/ffead-cpp-src/ffead-cpp-5.0-bin $IROOT/ffead-cpp-sql-raw +make install && mv $IROOT/ffead-cpp-src/ffead-cpp-6.0-bin $IROOT/ffead-cpp-sql-raw #Start postgresql service postgresql stop #For profiling/benchmarking cd $IROOT/ -sed -i 's|cmake .|CC=/usr/bin/clang CXX=/usr/bin/clang++ cmake .|g' $IROOT/ffead-cpp-sql-raw/resources/rundyn-automake.sh +sed -i 's|cmake |CC=/usr/bin/clang CXX=/usr/bin/clang++ cmake |g' $IROOT/ffead-cpp-sql-raw/resources/rundyn-automake.sh #sed -i 's|-fprofile-instr-generate=/tmp/cprof.prof|-fprofile-instr-generate=/tmp/cprofdi.prof|g' $IROOT/ffead-cpp-sql-raw/rtdcf/CMakeLists.txt.template ./install_ffead-cpp-sql-raw-profiled.sh rm -rf $IROOT/ffead-cpp-sql-raw @@ -53,20 +23,20 @@ cd build llvm-profdata-10 merge -output=/tmp/cprof.pgo /tmp/cprof.prof #llvm-profdata-10 merge -output=/tmp/cprofdi.pgo /tmp/cprofdi.prof ls -ltr /tmp/cprof* -CC=/usr/bin/clang CXX=/usr/bin/clang++ CXXFLAGS="-march=native -flto -fprofile-instr-use=/tmp/cprof.pgo" cmake -DSRV_EMB=on -DMOD_MEMCACHED=on -DMOD_REDIS=on -DMOD_SDORM_MONGO=on .. -make install && mv $IROOT/ffead-cpp-src/ffead-cpp-5.0-bin $IROOT/ffead-cpp-sql-raw +CC=/usr/bin/clang CXX=/usr/bin/clang++ CXXFLAGS="-march=native -flto -fprofile-instr-use=/tmp/cprof.pgo" cmake -DSRV_EMB=on -DMOD_REDIS=on .. +make install && mv $IROOT/ffead-cpp-src/ffead-cpp-6.0-bin $IROOT/ffead-cpp-sql-raw #Start postgresql service postgresql stop #For profiling/benchmarking cd $IROOT/ -sed -i 's|cmake .|CC=/usr/bin/clang CXX=/usr/bin/clang++ cmake .|g' $IROOT/ffead-cpp-sql-raw/resources/rundyn-automake.sh +sed -i 's|cmake |CC=/usr/bin/clang CXX=/usr/bin/clang++ cmake |g' $IROOT/ffead-cpp-sql-raw/resources/rundyn-automake.sh #sed -i 's|-fprofile-instr-use=/tmp/cprof.pgo|-fprofile-instr-use=/tmp/cprofdi.pgo|g' $IROOT/ffead-cpp-sql-raw/rtdcf/CMakeLists.txt.template ./install_ffead-cpp-sql-raw-profiled.sh -mv $IROOT/ffead-cpp-sql-raw $IROOT/ffead-cpp-5.0-sql +mv $IROOT/ffead-cpp-sql-raw $IROOT/ffead-cpp-6.0-sql -sed -i 's|localhost|tfb-database|g' $IROOT/ffead-cpp-5.0-sql/web/te-benchmark-um-pq/config/sdorm.xml +sed -i 's|localhost|tfb-database|g' $IROOT/ffead-cpp-6.0-sql/web/te-benchmark-um-pq/config/sdorm.xml apt remove -yqq postgresql-13 postgresql-contrib-13 gnupg lsb-release apt autoremove -yqq diff --git a/frameworks/C++/ffead-cpp/sql-profiled-install.sh b/frameworks/C++/ffead-cpp/sql-profiled-install.sh index fad97956c28..e1465d99cf0 100644 --- a/frameworks/C++/ffead-cpp/sql-profiled-install.sh +++ b/frameworks/C++/ffead-cpp/sql-profiled-install.sh @@ -1,45 +1,17 @@ -mkdir /tmp/profile-data - -rm -rf $IROOT/ffead-cpp-5.0-sql - -if [ "$1" = "batch" ] -then - apt remove -yqq libpq-dev - apt autoremove -yqq - apt update && apt install -y bison flex libreadline-dev - cd /tmp - wget -q https://github.com/an-tao/postgres/archive/batch_mode_ubuntu.tar.gz - tar -xzf batch_mode_ubuntu.tar.gz - cd postgres-batch_mode_ubuntu - ./configure --prefix=/usr CFLAGS='-O2 -pipe -march=native' - make && make install -fi - cd $IROOT/ffead-cpp-src/ -rm -rf CMakeCache.txt CMakeFiles -rm -rf web/te-benchmark-um web/te-benchmark-um-mgr web/te-benchmark-um-pq-async - -sed -i 's|add_subdirectory(${PROJECT_SOURCE_DIR}/web/te-benchmark-um)||g' CMakeLists.txt -sed -i 's|add_subdirectory(${PROJECT_SOURCE_DIR}/web/te-benchmark-um-mgr)||g' CMakeLists.txt -sed -i 's|add_subdirectory(${PROJECT_SOURCE_DIR}/web/te-benchmark-um-pq-async)||g' CMakeLists.txt -sed -i 's|install(FILES ${PROJECT_BINARY_DIR}/web/te-benchmark-um/libte_benchmark_um${LIB_EXT} DESTINATION 
${PROJECT_NAME}-bin/lib)||g' CMakeLists.txt -sed -i 's|install(FILES ${PROJECT_BINARY_DIR}/web/te-benchmark-um-mgr/libte_benchmark_um_mgr${LIB_EXT} DESTINATION ${PROJECT_NAME}-bin/lib)||g' CMakeLists.txt -sed -i 's|install(FILES ${PROJECT_BINARY_DIR}/web/te-benchmark-um-pq-async/libte_benchmark_um_pq_async${LIB_EXT} DESTINATION ${PROJECT_NAME}-bin/lib)||g' CMakeLists.txt - -sed -i 's|tfb-database|localhost|g' $IROOT/ffead-cpp-src/web/te-benchmark-um-pq/config/sdorm.xml rm -rf build mkdir build cd build CXXFLAGS="-march=native -flto -fprofile-dir=/tmp/profile-data -fprofile-generate" cmake -DSRV_EMB=on -DMOD_REDIS=on .. -make install && mv $IROOT/ffead-cpp-src/ffead-cpp-5.0-bin $IROOT/ffead-cpp-sql-raw +make install && mv $IROOT/ffead-cpp-src/ffead-cpp-6.0-bin $IROOT/ffead-cpp-sql-raw #Start postgresql service postgresql stop #For profiling/benchmarking cd $IROOT/ -#sed -i 's|cmake .|cmake -DCMAKE_EXE_LINKER_FLAGS="-fprofile-dir=/tmp/profile-data -fprofile-generate" -DCMAKE_CXX_FLAGS="-march=native -fprofile-dir=/tmp/profile-data -fprofile-generate" .|g' $IROOT/ffead-cpp-sql-raw/resources/rundyn-automake.sh +#sed -i 's|cmake |cmake -DCMAKE_EXE_LINKER_FLAGS="-fprofile-dir=/tmp/profile-data -fprofile-generate" -DCMAKE_CXX_FLAGS="-march=native -fprofile-dir=/tmp/profile-data -fprofile-generate" |g' $IROOT/ffead-cpp-sql-raw/resources/rundyn-automake.sh ./install_ffead-cpp-sql-raw-profiled.sh rm -rf $IROOT/ffead-cpp-sql-raw @@ -47,19 +19,19 @@ cd $IROOT/ffead-cpp-src rm -rf build mkdir build cd build -CXXFLAGS="-march=native -flto -fprofile-dir=/tmp/profile-data -fprofile-use=/tmp/profile-data -fprofile-correction" cmake -DSRV_EMB=on -DMOD_MEMCACHED=on -DMOD_REDIS=on -DMOD_SDORM_MONGO=on .. -make install && mv $IROOT/ffead-cpp-src/ffead-cpp-5.0-bin $IROOT/ffead-cpp-sql-raw +CXXFLAGS="-march=native -flto -fprofile-dir=/tmp/profile-data -fprofile-use=/tmp/profile-data -fprofile-correction" cmake -DSRV_EMB=on -DMOD_REDIS=on .. 
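One detail shared by all of the profiled install scripts in this change: the datasource host in the relevant te-benchmark-um-pq*/config/sdorm.xml is switched to localhost while the warm-up traffic runs against the PostgreSQL instance inside the build container, then switched back to tfb-database at the end so the final image talks to the benchmark database machine. In simplified form (the scripts actually flip the source-tree copy before building and the installed copy afterwards; this sketch collapses that to one file):

#!/bin/bash
CFG=$IROOT/ffead-cpp-6.0-sql/web/te-benchmark-um-pq/config/sdorm.xml

# profile against the database running inside the build container
sed -i 's|tfb-database|localhost|g' "$CFG"
# ... instrumented builds and warm-up runs ...

# restore the real database host expected in the benchmark environment
sed -i 's|localhost|tfb-database|g' "$CFG"
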
+make install && mv $IROOT/ffead-cpp-src/ffead-cpp-6.0-bin $IROOT/ffead-cpp-sql-raw #Start postgresql service postgresql stop #For profiling/benchmarking cd $IROOT/ -#sed -i 's|cmake .|CXXFLAGS="-march=native -fprofile-dir=/tmp/profile-data -fprofile-use -fprofile-correction" cmake .|g' $IROOT/ffead-cpp-sql-raw/resources/rundyn-automake.sh +#sed -i 's|cmake |CXXFLAGS="-march=native -fprofile-dir=/tmp/profile-data -fprofile-use -fprofile-correction" cmake |g' $IROOT/ffead-cpp-sql-raw/resources/rundyn-automake.sh ./install_ffead-cpp-sql-raw-profiled.sh -mv $IROOT/ffead-cpp-sql-raw $IROOT/ffead-cpp-5.0-sql +mv $IROOT/ffead-cpp-sql-raw $IROOT/ffead-cpp-6.0-sql -sed -i 's|localhost|tfb-database|g' $IROOT/ffead-cpp-5.0-sql/web/te-benchmark-um-pq/config/sdorm.xml +sed -i 's|localhost|tfb-database|g' $IROOT/ffead-cpp-6.0-sql/web/te-benchmark-um-pq/config/sdorm.xml apt remove -yqq postgresql-13 postgresql-contrib-13 gnupg lsb-release apt autoremove -yqq diff --git a/frameworks/C++/ffead-cpp/sql-profiled-util.sh b/frameworks/C++/ffead-cpp/sql-profiled-util.sh new file mode 100644 index 00000000000..e8422fbe24f --- /dev/null +++ b/frameworks/C++/ffead-cpp/sql-profiled-util.sh @@ -0,0 +1,54 @@ +mkdir /tmp/profile-data + +rm -rf $IROOT/ffead-cpp-6.0-sql + +if [ "$1" = "batch" ] +then + apt remove -yqq libpq-dev + apt autoremove -yqq + rm -f /usr/lib/x86_64-linux-gnu/libpq.* + apt update && apt install -y bison flex libreadline-dev + cd /tmp + #wget -q https://github.com/an-tao/postgres/archive/batch_mode_ubuntu.tar.gz + #tar -xzf batch_mode_ubuntu.tar.gz + #cd postgres-batch_mode_ubuntu + #./configure --prefix=/usr CFLAGS='-O2 -pipe -march=native' + #make && make install + wget -nv https://github.com/postgres/postgres/archive/b787d4ce6d910080065025bcd5f968544997271f.zip + unzip -q b787d4ce6d910080065025bcd5f968544997271f.zip + cd postgres-b787d4ce6d910080065025bcd5f968544997271f + wget -nv https://www.postgresql.org/message-id/attachment/115223/v22-0001-libpq-batch.patch + git apply ./v22-0001-libpq-batch.patch + ./configure --prefix=/usr CFLAGS='-O3 -march=native -flto' + cd src/interfaces/libpq + make all install -j4 + cp ../../../src/include/postgres_ext.h ../../../src/include/pg_config_ext.h libpq-fe.h /usr/include +fi + +if [ "$2" = "clang" ] +then + apt update -yqq && apt install -yqq clang +fi + +cd $IROOT/ffead-cpp-src/ +rm -rf $IROOT/ffead-cpp-sql-raw +rm -rf CMakeCache.txt CMakeFiles +rm -rf web/te-benchmark-um web/te-benchmark-um-mgr + +sed -i 's|add_subdirectory(${PROJECT_SOURCE_DIR}/web/te-benchmark-um)||g' CMakeLists.txt +sed -i 's|add_subdirectory(${PROJECT_SOURCE_DIR}/web/te-benchmark-um-mgr)||g' CMakeLists.txt +sed -i 's|install(FILES ${PROJECT_BINARY_DIR}/web/te-benchmark-um/libte-benchmark-um${LIB_EXT} DESTINATION ${PROJECT_NAME}-bin/lib)||g' CMakeLists.txt +sed -i 's|install(FILES ${PROJECT_BINARY_DIR}/web/te-benchmark-um-mgr/libte-benchmark-um-mgr${LIB_EXT} DESTINATION ${PROJECT_NAME}-bin/lib)||g' CMakeLists.txt + +if [ "$3" = "async" ] +then + sed -i 's|add_subdirectory(${PROJECT_SOURCE_DIR}/web/te-benchmark-um-pq)||g' CMakeLists.txt + sed -i 's|install(FILES ${PROJECT_BINARY_DIR}/web/te-benchmark-um-pq/libte-benchmark-um-pq${LIB_EXT} DESTINATION ${PROJECT_NAME}-bin/lib)||g' CMakeLists.txt + sed -i 's|tfb-database|localhost|g' $IROOT/ffead-cpp-src/web/te-benchmark-um-pq-async/config/sdorm.xml + rm -rf web/te-benchmark-um-pq +else + sed -i 's|add_subdirectory(${PROJECT_SOURCE_DIR}/web/te-benchmark-um-pq-async)||g' CMakeLists.txt + sed -i 's|install(FILES 
${PROJECT_BINARY_DIR}/web/te-benchmark-um-pq-async/libte-benchmark-um-pq-async${LIB_EXT} DESTINATION ${PROJECT_NAME}-bin/lib)||g' CMakeLists.txt + sed -i 's|tfb-database|localhost|g' $IROOT/ffead-cpp-src/web/te-benchmark-um-pq/config/sdorm.xml + rm -rf web/te-benchmark-um-pq-async +fi diff --git a/frameworks/C++/ffead-cpp/sql-v-picov-profiled-install.sh b/frameworks/C++/ffead-cpp/sql-v-picov-profiled-install.sh new file mode 100644 index 00000000000..f50a431ed52 --- /dev/null +++ b/frameworks/C++/ffead-cpp/sql-v-picov-profiled-install.sh @@ -0,0 +1,30 @@ +export FFEAD_CPP_PATH=${IROOT}/ffead-cpp-6.0-sql +export LD_LIBRARY_PATH=${IROOT}/:${IROOT}/lib:${FFEAD_CPP_PATH}/lib:/usr/local/lib:$LD_LIBRARY_PATH + +sed -i 's|tfb-database|localhost|g' $IROOT/ffead-cpp-6.0-sql/web/te-benchmark-um-pq/config/sdorm.xml + +cd $IROOT/lang-server-backends/v/pico.v +v -prod -cflags '-std=gnu11 -Wall -O3 -march=native -mtune=native -no-pie -flto -fprofile-dir=/tmp/profile-data -fprofile-generate -lgcov --coverage' main.v + +#Start postgresql +service postgresql start +#For profiling/benchmarking + +cd $IROOT/ +./install_ffead-cpp-sql-raw-v-picov-profiled.sh + +cd $IROOT/lang-server-backends/v/pico.v +v -prod -cflags '-std=gnu11 -Wall -O3 -march=native -mtune=native -no-pie -flto -fprofile-dir=/tmp/profile-data -fprofile-use=/tmp/profile-data -fprofile-correction -lgcov --coverage' main.v + +cd $IROOT/ +./install_ffead-cpp-sql-raw-v-picov-profiled.sh + +mv $IROOT/lang-server-backends/v/pico.v/main $IROOT/ + +sed -i 's|localhost|tfb-database|g' $IROOT/ffead-cpp-6.0-sql/web/te-benchmark-um-pq/config/sdorm.xml + +apt remove -yqq postgresql-13 postgresql-contrib-13 gnupg lsb-release +apt autoremove -yqq +rm -rf /ssd/postgresql +rm -rf /tmp/postgresql +rm -rf /tmp/wrk /usr/local/bin/wrk diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/CMakeLists.txt b/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/CMakeLists.txt index 2b4cc086955..b7d99d0455a 100644 --- a/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/CMakeLists.txt +++ b/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/CMakeLists.txt @@ -5,6 +5,9 @@ file(GLOB sources ) include_directories("${CMAKE_SOURCE_DIR}/web/te-benchmark-um-mgr/include") -add_library(te_benchmark_um_mgr SHARED ${sources}) -set_property(TARGET te_benchmark_um_mgr PROPERTY POSITION_INDEPENDENT_CODE ON) -target_link_libraries(te_benchmark_um_mgr ffead-modules ffead-framework ${HAVE_PQLIB} ${HAVE_CURLLIB} ${HAVE_SSLLIB} ${HAVE_MEMCACHEDLIB} ${HAVE_ODBCLIB} ${HAVE_MONGOCLIB} ${HAVE_BSONLIB} ${HAVE_ZLIB} ${HAVE_CRYPTOLIB}) +if(BUILD_STATIC_LIBS OR EMSCRIPTEN) + add_library(te-benchmark-um-mgr STATIC ${sources}) +else() + add_library(te-benchmark-um-mgr ${sources}) +endif() +target_link_libraries(te-benchmark-um-mgr ffead-modules ffead-framework ${HAVE_PQLIB} ${HAVE_CURLLIB} ${HAVE_SSLLIB} ${HAVE_MEMCACHEDLIB} ${HAVE_ODBCLIB} ${HAVE_MONGOCLIB} ${HAVE_BSONLIB} ${HAVE_ZLIB} ${HAVE_CRYPTOLIB}) diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/config/application.xml b/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/config/application.xml index 05171dc732b..852ff9a44ba 100644 --- a/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/config/application.xml +++ b/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/config/application.xml @@ -1,4 +1,4 @@ - + * GET, POST, HEAD, PUT, DELETE diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/config/cache.xml b/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/config/cache.xml deleted file mode 100644 index a06b17de6de..00000000000 --- 
a/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/config/cache.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - - - sumeet - sumeet - - - TeBkUmMgrRouter.updateCache - Memory-Cached - memory - - \ No newline at end of file diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/include/TeBkUmMgr.h b/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/include/TeBkUmMgr.h index 61b2487e1be..3f3be8cd461 100644 --- a/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/include/TeBkUmMgr.h +++ b/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/include/TeBkUmMgr.h @@ -43,8 +43,6 @@ #include "yuarel.h" #include "Router.h" -typedef void (*TeBkUmMgrTemplatePtr) (Context*, std::string&); - class TeBkUmMgrWorld { int id; int randomNumber; @@ -83,8 +81,11 @@ class TeBkUmMgrRouter : public Router { static std::string WORLD; static std::string FORTUNE; - static std::string APP_NAME; - static std::string TPE_FN_NAME; + static TemplatePtr tmplFunc; + + static Ser m_ser; + static Ser w_ser; + static SerCont wcont_ser; bool strToNum(const char* str, int len, int& ret); @@ -111,7 +112,7 @@ class TeBkUmMgrRouter : public Router { TeBkUmMgrRouter(); virtual ~TeBkUmMgrRouter(); void updateCache(); - bool route(HttpRequest* req, HttpResponse* res, void* dlib, void* ddlib, SocketInterface* sif); + bool route(HttpRequest* req, HttpResponse* res, SocketInterface* sif); }; #endif /* WEB_TE_BENCHMARK_UM_INCLUDE_TeBkUmMgr_H_ */ diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/meson.build b/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/meson.build new file mode 100644 index 00000000000..7199d7e3621 --- /dev/null +++ b/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/meson.build @@ -0,0 +1,15 @@ + +module_includes = ['/usr/local/include','../../src/modules/common','../../src/modules/cache','../../src/modules/cache/memory','../../src/modules/cache/redis', + '../../src/modules/cache/memcached','../../src/modules/client-util','../../src/modules/http','../../src/modules/http/http11','../../src/modules/http/http20', + '../../src/modules/ioc','../../src/modules/jobs','../../src/modules/reflection','../../src/modules/sdorm','../../src/modules/sdorm/sql','../../src/modules/sdorm/sql/libpq', + '../../src/modules/client-util/ssl','../../src/modules/sdorm/mongo','../../src/modules/sdorm/mongo/raw','../../src/modules/search','../../src/modules/serialization', + '../../src/modules/serialization/xml','../../src/modules/serialization/json','../../src/modules/server-util','../../src/modules/ssl','../../src/modules/threads', + '../../src/framework','include'] +module_includes += global_includes +module_libs = [global_libs, libbson, libmongoc] + +c = run_command(meson_grabber, 'src/') +module_sources = c.stdout().strip().split('\n') + +shared_library('te-benchmark-um-mgr', module_sources, include_directories: module_includes, dependencies: module_libs, link_with: [libffeadmodules, libffeadframework], + install: true, install_dir: bin_dir+'/lib') \ No newline at end of file diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/src/TeBkUmMgr.cpp b/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/src/TeBkUmMgr.cpp index 55a5d7d70eb..24424822515 100644 --- a/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/src/TeBkUmMgr.cpp +++ b/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/src/TeBkUmMgr.cpp @@ -214,6 +214,7 @@ void TeBkUmMgrRouter::updateCache() { cchi->setRaw(CastUtil::fromNumber(w.getId()), str); } CacheManager::cleanImpl(cchi); + CacheManager::triggerAppInitCompletion(); } catch(const std::exception& e) { CacheManager::cleanImpl(cchi); throw e; @@ -332,106 +333,60 
@@ bool TeBkUmMgrRouter::strToNum(const char* str, int len, int& ret) { return true; } -bool TeBkUmMgrRouter::route(HttpRequest* req, HttpResponse* res, void* dlib, void* ddlib, SocketInterface* sif) { - //Timer t; - //t.start(); +bool TeBkUmMgrRouter::route(HttpRequest* req, HttpResponse* res, SocketInterface* sif) { std::string_view path = req->getPath(); if(StringUtil::endsWith(path, "/plaintext")) { - //t.end(); - //CommonUtils::tsContRstLkp += t.timerNanoSeconds(); - //t.start(); res->setContent(HELLO_WORLD); res->setContentType(ContentTypes::CONTENT_TYPE_TEXT_PLAIN); res->setHTTPResponseStatus(HTTPResponseStatus::Ok); - //t.end(); - //CommonUtils::tsContRstSer += t.timerNanoSeconds(); } else if(StringUtil::endsWith(path, "/json")) { - //t.end(); - //CommonUtils::tsContRstLkp += t.timerNanoSeconds(); - //t.start(); TeBkUmMgrMessage msg; msg.setMessage(HELLO_WORLD); - JSONSerialize::serializeUnknown(&msg, 0, "TeBkUmMgrMessage", res->getContentP()); + JSONSerialize::serializeObject(&msg, m_ser, res->getContentP()); res->setContentType(ContentTypes::CONTENT_TYPE_APPLICATION_JSON); res->setHTTPResponseStatus(HTTPResponseStatus::Ok); - //t.end(); - //CommonUtils::tsContRstSer += t.timerNanoSeconds(); } else if(StringUtil::endsWith(path, "/db")) { - //t.end(); - //CommonUtils::tsContRstLkp += t.timerNanoSeconds(); - //t.start(); TeBkUmMgrWorld msg; db(msg); - //t.end(); - //CommonUtils::tsContExec += t.timerNanoSeconds(); - //t.start(); - JSONSerialize::serializeUnknown(&msg, 0, "TeBkUmMgrWorld", res->getContentP()); + JSONSerialize::serializeObject(&msg, w_ser, res->getContentP()); res->setContentType(ContentTypes::CONTENT_TYPE_APPLICATION_JSON); res->setHTTPResponseStatus(HTTPResponseStatus::Ok); - //t.end(); - //CommonUtils::tsContRstSer += t.timerNanoSeconds(); } else if(StringUtil::endsWith(path, "/queries")) { - //t.end(); - //CommonUtils::tsContRstLkp += t.timerNanoSeconds(); - //t.start(); struct yuarel_param params[1]; yuarel_parse_query((char*)req->getQueryStr().data(), req->getQueryStr().size(), params, 1); std::vector msg; queries(params[0].val, params[0].val_len, msg); - //t.end(); - //CommonUtils::tsContExec += t.timerNanoSeconds(); - //t.start(); - JSONSerialize::serializeUnknown(&msg, 100, "std::vector", res->getContentP()); + JSONSerialize::serializeObjectCont(&msg, wcont_ser, "vector", res->getContentP()); res->setContentType(ContentTypes::CONTENT_TYPE_APPLICATION_JSON); res->setHTTPResponseStatus(HTTPResponseStatus::Ok); - //t.end(); - //CommonUtils::tsContRstSer += t.timerNanoSeconds(); } else if(StringUtil::endsWith(path, "/fortunes")) { Context ctx; getContext(req, &ctx); - void* mkr = dlsym(ddlib, TPE_FN_NAME.c_str()); - if(mkr!=NULL) + if(tmplFunc!=NULL) { - TeBkUmMgrTemplatePtr f = (TeBkUmMgrTemplatePtr)mkr; - std::string msg; - f(&ctx, msg); - res->setContent(msg); + fcpstream str; + tmplFunc(&ctx, str); + res->setContent(str.str()); res->setContentType(ContentTypes::CONTENT_TYPE_TEXT_SHTML); res->setHTTPResponseStatus(HTTPResponseStatus::Ok); } } else if(StringUtil::endsWith(path, "/updates")) { - //t.end(); - //CommonUtils::tsContRstLkp += t.timerNanoSeconds(); - //t.start(); struct yuarel_param params[1]; yuarel_parse_query((char*)req->getQueryStr().data(), req->getQueryStr().size(), params, 1); std::vector msg; updates(params[0].val, params[0].val_len, msg); - //t.end(); - //CommonUtils::tsContExec += t.timerNanoSeconds(); - //t.start(); - JSONSerialize::serializeUnknown(&msg, 100, "std::vector", res->getContentP()); + JSONSerialize::serializeObjectCont(&msg, 
wcont_ser, "vector", res->getContentP()); res->setContentType(ContentTypes::CONTENT_TYPE_APPLICATION_JSON); res->setHTTPResponseStatus(HTTPResponseStatus::Ok); - //t.end(); - //CommonUtils::tsContRstSer += t.timerNanoSeconds(); } else if(StringUtil::endsWith(path, "/cached-worlds")) { - //t.end(); - //CommonUtils::tsContRstLkp += t.timerNanoSeconds(); - //t.start(); struct yuarel_param params[1]; yuarel_parse_query((char*)req->getQueryStr().data(), req->getQueryStr().size(), params, 1); std::vector msg; cachedWorlds(params[0].val, params[0].val_len, msg); - //t.end(); - //CommonUtils::tsContExec += t.timerNanoSeconds(); - //t.start(); - JSONSerialize::serializeUnknown(&msg, 100, "std::vector", res->getContentP()); + JSONSerialize::serializeObjectCont(&msg, wcont_ser, "vector", res->getContentP()); res->setContentType(ContentTypes::CONTENT_TYPE_APPLICATION_JSON); res->setHTTPResponseStatus(HTTPResponseStatus::Ok); - //t.end(); - //CommonUtils::tsContRstSer += t.timerNanoSeconds(); } else { res->setHTTPResponseStatus(HTTPResponseStatus::NotFound); } @@ -439,17 +394,19 @@ bool TeBkUmMgrRouter::route(HttpRequest* req, HttpResponse* res, void* dlib, voi return true; } -std::string TeBkUmMgrRouter::APP_NAME = ""; -std::string TeBkUmMgrRouter::TPE_FN_NAME = ""; +TemplatePtr TeBkUmMgrRouter::tmplFunc; +Ser TeBkUmMgrRouter::m_ser; +Ser TeBkUmMgrRouter::w_ser; +SerCont TeBkUmMgrRouter::wcont_ser; TeBkUmMgrRouter::TeBkUmMgrRouter() { #ifdef INC_SDORM_MONGO sqli = NULL; #endif - if(APP_NAME=="") { - APP_NAME = CommonUtils::normalizeAppName("te-benchmark-um-mgr"); - TPE_FN_NAME = CommonUtils::getTpeFnName("tpe/fortunes.tpe", "te-benchmark-um-mgr"); - } + tmplFunc = TemplateUtil::getTemplateFunc("te-benchmark-um-mgr", "tpe/fortunes.tpe"); + m_ser = Serializer::getSerFuncForObject("te-benchmark-um-mgr", "TeBkUmMgrMessage"); + w_ser = Serializer::getSerFuncForObject("te-benchmark-um-mgr", "TeBkUmMgrWorld"); + wcont_ser = Serializer::getSerFuncForObjectCont("te-benchmark-um-mgr", "TeBkUmMgrWorld", "std::vector"); } TeBkUmMgrRouter::~TeBkUmMgrRouter() { diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/src/autotools/Makefile.am b/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/src/autotools/Makefile.am index 6e5fd5dd985..bef97f351c6 100644 --- a/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/src/autotools/Makefile.am +++ b/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/src/autotools/Makefile.am @@ -8,7 +8,7 @@ distdir=${PACKAGE_NAME}-${PACKAGE_VERSION}-src fprefix=../../../../${packageIdentifier} prefix=${abs_builddir} -lib_LTLIBRARIES = libte_benchmark_um_mgr.la +lib_LTLIBRARIES = libte-benchmark-um-mgr.la libte_benchmark_um_mgr_la_SOURCES = ../TeBkUmMgr.cpp libte_benchmark_um_mgr_la_LDFLAGS = -no-undefined diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/tpe/fortunes.tpe b/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/tpe/fortunes.tpe index 6bd604ada81..437458dd3f6 100644 --- a/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/tpe/fortunes.tpe +++ b/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/tpe/fortunes.tpe @@ -6,7 +6,7 @@ #for(int i=0;i<(int)fortunes.size();i++)# - + #rof#
 			<tr><th>id</th><th>message</th></tr>
-			<tr><td>$_S{fortunes.at(i).getId()}</td><td>${fortunes.at(i).getMessage()}</td></tr>
+			<tr><td>${fortunes.at(i).getId()}</td><td>${fortunes.at(i).getMessage()}</td></tr>
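[Editor's note, not part of the patch] The te-benchmark-um-mgr changes above follow one pattern: the removed code resolved the fortunes template with dlsym() and serialized via serializeUnknown() on every request, while the added code resolves a TemplatePtr and Ser/SerCont handles once (in the router constructor) and reuses them in route(). A minimal, self-contained sketch of that resolve-once technique, assuming hypothetical names (Registry map, RenderFn, fortunes_tpl) in place of the ffead-cpp APIs visible in the diff:

// Illustrative sketch only: resolve the render handle once, reuse per request.
#include <iostream>
#include <map>
#include <sstream>
#include <string>

using RenderFn = void (*)(std::ostream&);

// Stand-in for the framework's template registry (hypothetical).
static void fortunes_tpl(std::ostream& out) { out << "<table>...</table>"; }
static std::map<std::string, RenderFn> registry = {{"tpe/fortunes.tpe", fortunes_tpl}};

struct Router {
    static RenderFn tmplFunc;                       // resolved once, shared by all requests
    Router() {
        if (tmplFunc == nullptr) tmplFunc = registry["tpe/fortunes.tpe"];
    }
    void handleFortunes(std::ostream& res) const {
        if (tmplFunc != nullptr) tmplFunc(res);     // no per-request symbol lookup
    }
};
RenderFn Router::tmplFunc = nullptr;

int main() {
    Router r;
    std::ostringstream body;
    r.handleFortunes(body);
    std::cout << body.str() << "\n";
}

The same trade-off motivates the static m_ser/w_ser/wcont_ser members: the lookup cost moves from the hot path to startup.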
diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/xmake.lua b/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/xmake.lua new file mode 100644 index 00000000000..7e693860aa9 --- /dev/null +++ b/frameworks/C++/ffead-cpp/te-benchmark-um-mgr/xmake.lua @@ -0,0 +1,12 @@ +add_includedirs("include/") + +local bindir = "$(projectdir)/ffead-cpp-6.0-bin" + +target("te-benchmark-um-mgr") + set_languages("c++17") + add_deps("ffead-framework") + add_options(getOptions()) + set_kind("shared") + on_load(setIncludes) + add_files("src/*.cpp") + set_installdir(bindir) diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/CMakeLists.txt b/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/CMakeLists.txt index a03ca88f172..c3012c8848b 100644 --- a/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/CMakeLists.txt +++ b/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/CMakeLists.txt @@ -5,6 +5,10 @@ file(GLOB sources ) include_directories("${CMAKE_SOURCE_DIR}/web/te-benchmark-um-pq-async/include") -add_library(te_benchmark_um_pq_async SHARED ${sources}) -set_property(TARGET te_benchmark_um_pq_async PROPERTY POSITION_INDEPENDENT_CODE ON) -target_link_libraries(te_benchmark_um_pq_async ffead-modules ffead-framework ${HAVE_PQLIB} ${HAVE_CURLLIB} ${HAVE_SSLLIB} ${HAVE_MEMCACHEDLIB} ${HAVE_ODBCLIB} ${HAVE_MONGOCLIB} ${HAVE_BSONLIB} ${HAVE_ZLIB} ${HAVE_CRYPTOLIB}) +if(BUILD_STATIC_LIBS OR EMSCRIPTEN) + add_library(te-benchmark-um-pq-async STATIC ${sources}) +else() + add_library(te-benchmark-um-pq-async ${sources}) +endif() +set_property(TARGET te-benchmark-um-pq-async PROPERTY POSITION_INDEPENDENT_CODE ON) +target_link_libraries(te-benchmark-um-pq-async ffead-modules ffead-framework ${HAVE_PQLIB} ${HAVE_CURLLIB} ${HAVE_SSLLIB} ${HAVE_MEMCACHEDLIB} ${HAVE_ODBCLIB} ${HAVE_MONGOCLIB} ${HAVE_BSONLIB} ${HAVE_ZLIB} ${HAVE_CRYPTOLIB}) diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/config/application.xml b/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/config/application.xml index 1c084b4d9c8..baab8a23bf6 100644 --- a/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/config/application.xml +++ b/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/config/application.xml @@ -1,4 +1,4 @@ - + * GET, POST, HEAD, PUT, DELETE diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/include/TeBkUmLpqAsync.h b/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/include/TeBkUmLpqAsync.h index 7e257a787e6..13c2d5297ac 100644 --- a/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/include/TeBkUmLpqAsync.h +++ b/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/include/TeBkUmLpqAsync.h @@ -43,13 +43,13 @@ #include "yuarel.h" #include "Router.h" -typedef void (*TeBkUmLpqAsyncTemplatePtr) (Context*, std::string&); - class TeBkUmLpqAsyncWorld { int id; int randomNumber; public: TeBkUmLpqAsyncWorld(); + TeBkUmLpqAsyncWorld(int id); + TeBkUmLpqAsyncWorld(int id, int randomNumber); virtual ~TeBkUmLpqAsyncWorld(); int getId() const; void setId(int id); @@ -59,36 +59,44 @@ class TeBkUmLpqAsyncWorld { class TeBkUmLpqAsyncFortune { int id; - std::string message; public: + std::string message_i; + std::string_view message; + bool allocd; + TeBkUmLpqAsyncFortune(int id); + TeBkUmLpqAsyncFortune(int id, std::string message); TeBkUmLpqAsyncFortune(); virtual ~TeBkUmLpqAsyncFortune(); int getId() const; void setId(int id); - const std::string& getMessage() const; - void setMessage(const std::string& message); bool operator < (const TeBkUmLpqAsyncFortune& other) const; }; class TeBkUmLpqAsyncMessage { std::string 
message; public: + TeBkUmLpqAsyncMessage(); + TeBkUmLpqAsyncMessage(std::string message); virtual ~TeBkUmLpqAsyncMessage(); const std::string& getMessage() const; void setMessage(const std::string& message); }; struct AsyncReq { - HttpResponse r; + float httpVers; + bool conn_clos; SocketInterface* sif; - void* d; - void* ddlib; LibpqDataSourceImpl* sqli; + + TeBkUmLpqAsyncWorld w; + std::vector vec; + std::list flst; }; struct CacheReq { - void* d; CacheInterface* cchi; + + std::vector vec; }; class TeBkUmLpqAsyncRouter : public Router { @@ -98,12 +106,13 @@ class TeBkUmLpqAsyncRouter : public Router { static std::string WORLD_ALL_QUERY; static std::string FORTUNE_ALL_QUERY; - static std::string APP_NAME; - static std::string TPE_FN_NAME; + static TemplatePtr tmplFunc; - std::atomic which = { false }; + static Ser m_ser; + static Ser w_ser; + static SerCont wcont_ser; - bool strToNum(const char* str, int len, int& ret); + static bool strToNum(const char* str, int len, int& ret); void dbAsync(AsyncReq* req); static void dbAsyncUtil(void* ctx, int rn, int cn, char * d); @@ -113,6 +122,12 @@ class TeBkUmLpqAsyncRouter : public Router { static void queriesAsyncUtil(void* ctx, int rn, int cn, char * d); static void queriesAsyncCh(void* ctx, bool status, const std::string& q, int counter); +#ifndef HAVE_LIBPQ_BATCH + void queriesMultiAsync(const char*, int, AsyncReq*); + static void queriesMultiAsyncUtil(void* ctx, int, int, char *, int); + static void queriesMultiAsyncCh(void* ctx, bool status, const std::string& q, int counter); +#endif + void updatesAsync(const char* q, int ql, AsyncReq* req); static void updatesAsyncChQ(void* ctx, bool status, const std::string& q, int counter); static void updatesAsyncChU(void* ctx, bool status, const std::string& q, int counter); @@ -133,19 +148,18 @@ class TeBkUmLpqAsyncRouter : public Router { static std::map _qC; LibpqDataSourceImpl* sqli; LibpqDataSourceImpl* getDb(); + //static Logger logger; public: TeBkUmLpqAsyncRouter& operator=(const TeBkUmLpqAsyncRouter& a) { - which = false; return *this; } TeBkUmLpqAsyncRouter(const TeBkUmLpqAsyncRouter& a) { - which = false; sqli = NULL; } TeBkUmLpqAsyncRouter(); virtual ~TeBkUmLpqAsyncRouter(); void updateCache(); - bool route(HttpRequest* req, HttpResponse* res, void* dlib, void* ddlib, SocketInterface* sif); + bool route(HttpRequest* req, HttpResponse* res, SocketInterface* sif); }; #endif /* WEB_TE_BENCHMARK_UM_INCLUDE_TeBkUmLpqAsync_H_ */ diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/meson.build b/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/meson.build new file mode 100644 index 00000000000..99a4486e16a --- /dev/null +++ b/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/meson.build @@ -0,0 +1,15 @@ + +module_includes = ['/usr/local/include','../../src/modules/common','../../src/modules/cache','../../src/modules/cache/memory','../../src/modules/cache/redis', + '../../src/modules/cache/memcached','../../src/modules/client-util','../../src/modules/http','../../src/modules/http/http11','../../src/modules/http/http20', + '../../src/modules/ioc','../../src/modules/jobs','../../src/modules/reflection','../../src/modules/sdorm','../../src/modules/sdorm/sql','../../src/modules/sdorm/sql/libpq', + '../../src/modules/client-util/ssl','../../src/modules/sdorm/mongo','../../src/modules/sdorm/mongo/raw','../../src/modules/search','../../src/modules/serialization', + 
'../../src/modules/serialization/xml','../../src/modules/serialization/json','../../src/modules/server-util','../../src/modules/ssl','../../src/modules/threads', + '../../src/framework','include'] +module_includes += global_includes +module_libs = [global_libs] + +c = run_command(meson_grabber, 'src/') +module_sources = c.stdout().strip().split('\n') + +shared_library('te-benchmark-um-pq-async', module_sources, include_directories: module_includes, dependencies: module_libs, link_with: [libffeadmodules, libffeadframework], + install: true, install_dir: bin_dir+'/lib') \ No newline at end of file diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/src/TeBkUmLpqAsync.cpp b/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/src/TeBkUmLpqAsync.cpp index ffe8c890ef1..724451a803f 100644 --- a/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/src/TeBkUmLpqAsync.cpp +++ b/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/src/TeBkUmLpqAsync.cpp @@ -37,6 +37,16 @@ void TeBkUmLpqAsyncWorld::setRandomNumber(int randomNumber) { this->randomNumber = randomNumber; } +TeBkUmLpqAsyncWorld::TeBkUmLpqAsyncWorld(int id, int randomNumber) { + this->id = id; + this->randomNumber = randomNumber; +} + +TeBkUmLpqAsyncWorld::TeBkUmLpqAsyncWorld(int id) { + this->id = id; + randomNumber = 0; +} + TeBkUmLpqAsyncWorld::TeBkUmLpqAsyncWorld() { id = 0; randomNumber = 0; @@ -53,25 +63,40 @@ void TeBkUmLpqAsyncFortune::setId(int id) { this->id = id; } -const std::string& TeBkUmLpqAsyncFortune::getMessage() const { - return message; +TeBkUmLpqAsyncFortune::TeBkUmLpqAsyncFortune(int id) { + this->id = id; + allocd = false; } -void TeBkUmLpqAsyncFortune::setMessage(const std::string& message) { - this->message = message; +TeBkUmLpqAsyncFortune::TeBkUmLpqAsyncFortune(int id, std::string message) { + this->id = id; + this->message_i = message; + this->message = std::string_view(this->message_i); + allocd = false; } TeBkUmLpqAsyncFortune::TeBkUmLpqAsyncFortune() { id = 0; + allocd = false; } TeBkUmLpqAsyncFortune::~TeBkUmLpqAsyncFortune() { + if(allocd && message.size()>0) { + free((void *)message.data()); + } } bool TeBkUmLpqAsyncFortune::operator < (const TeBkUmLpqAsyncFortune& other) const { return message.compare(other.message)<0; } +TeBkUmLpqAsyncMessage::TeBkUmLpqAsyncMessage() { +} + +TeBkUmLpqAsyncMessage::TeBkUmLpqAsyncMessage(std::string message) { + this->message = message; +} + TeBkUmLpqAsyncMessage::~TeBkUmLpqAsyncMessage() { } @@ -91,7 +116,6 @@ std::string TeBkUmLpqAsyncRouter::FORTUNE_ALL_QUERY = "select id, message from f std::map TeBkUmLpqAsyncRouter::_qC; void TeBkUmLpqAsyncRouter::dbAsync(AsyncReq* req) { - req->d = new TeBkUmLpqAsyncWorld; LibpqDataSourceImpl* sqli = getDb(); int rid = rand() % 10000 + 1; try { @@ -105,34 +129,31 @@ void TeBkUmLpqAsyncRouter::dbAsync(AsyncReq* req) { } void TeBkUmLpqAsyncRouter::dbAsyncUtil(void* ctx, int rn, int cn, char * d) { AsyncReq* req = (AsyncReq*)ctx; - TeBkUmLpqAsyncWorld* w = (TeBkUmLpqAsyncWorld*)req->d; - if(cn==0)w->setId(ntohl(*((uint32_t *) d))); - if(cn==1)w->setRandomNumber(ntohl(*((uint32_t *) d))); + if(cn==0)req->w.setId(ntohl(*((uint32_t *) d))); + if(cn==1)req->w.setRandomNumber(ntohl(*((uint32_t *) d))); } void TeBkUmLpqAsyncRouter::dbAsyncCh(void* ctx, bool status, const std::string& q, int counter) { AsyncReq* req = (AsyncReq*)ctx; - TeBkUmLpqAsyncWorld* w = (TeBkUmLpqAsyncWorld*)req->d; - req->r.setHTTPResponseStatus(HTTPResponseStatus::Ok); - std::string c; - JSONSerialize::serializeUnknown(w, 0, "TeBkUmLpqAsyncWorld", &c, APP_NAME); + 
HttpResponse r; + r.setHTTPResponseStatus(HTTPResponseStatus::Ok); + JSONSerialize::serializeObject(&req->w, w_ser, r.getContentP()); std::string d; - req->r.generateHeadResponse(d, ContentTypes::CONTENT_TYPE_APPLICATION_JSON, (int)c.length()); + r.generateHeadResponse(d, ContentTypes::CONTENT_TYPE_APPLICATION_JSON, req->httpVers, req->conn_clos); req->sif->writeDirect(d); - req->sif->writeDirect(c); + req->sif->writeDirect(r.getContent()); req->sif->unUse(); - delete w; delete req; } void TeBkUmLpqAsyncRouter::queriesAsync(const char* q, int ql, AsyncReq* req) { - req->d = new std::vector; - int queryCount = 0; strToNum(q, ql, queryCount); if(queryCount<1)queryCount=1; else if(queryCount>500)queryCount=500; + req->vec.reserve(queryCount); + LibpqDataSourceImpl* sqli = getDb(); try { @@ -150,28 +171,73 @@ void TeBkUmLpqAsyncRouter::queriesAsync(const char* q, int ql, AsyncReq* req) { } void TeBkUmLpqAsyncRouter::queriesAsyncUtil(void* ctx, int rn, int cn, char * d) { AsyncReq* req = (AsyncReq*)ctx; - std::vector* vec = (std::vector*)req->d; if(cn==0) { - vec->push_back(TeBkUmLpqAsyncWorld()); + req->vec.emplace_back(ntohl(*((uint32_t *) d))); + } else { + req->vec.back().setRandomNumber(ntohl(*((uint32_t *) d))); } - TeBkUmLpqAsyncWorld& w = vec->at(vec->size()-1); - if(cn==0)w.setId(ntohl(*((uint32_t *) d))); - if(cn==1)w.setRandomNumber(ntohl(*((uint32_t *) d))); } void TeBkUmLpqAsyncRouter::queriesAsyncCh(void* ctx, bool status, const std::string& q, int counter) { AsyncReq* req = (AsyncReq*)ctx; - std::vector* vec = (std::vector*)req->d; - req->r.setHTTPResponseStatus(HTTPResponseStatus::Ok); - std::string c; - JSONSerialize::serializeUnknown(vec, 100, "std::vector", &c, APP_NAME); + HttpResponse r; + r.setHTTPResponseStatus(HTTPResponseStatus::Ok); + JSONSerialize::serializeObjectCont(&req->vec, wcont_ser, "vector", r.getContentP()); + std::string d; + r.generateHeadResponse(d, ContentTypes::CONTENT_TYPE_APPLICATION_JSON, req->httpVers, req->conn_clos); + req->sif->writeDirect(d); + req->sif->writeDirect(r.getContent()); + req->sif->unUse(); + delete req; +} + + +#ifndef HAVE_LIBPQ_BATCH +void TeBkUmLpqAsyncRouter::queriesMultiAsync(const char* q, int ql, AsyncReq* req) { + int queryCount = 0; + strToNum(q, ql, queryCount); + if(queryCount<1)queryCount=1; + else if(queryCount>500)queryCount=500; + + req->vec.reserve(queryCount); + + LibpqDataSourceImpl* sqli = getDb(); + + try { + std::stringstream ss; + for (int c = 0; c < queryCount; ++c) { + int rid = rand() % 10000 + 1; + ss << "select id, randomnumber from world where id = " << rid << ";"; + } + void* areq = sqli->executeMultiQueryAsync(ss.str(), req, &TeBkUmLpqAsyncRouter::queriesMultiAsyncUtil, &TeBkUmLpqAsyncRouter::queriesMultiAsyncCh); + sqli->completeAsync(areq, queryCount); + } catch(const std::exception& e) { + throw e; + } +} +void TeBkUmLpqAsyncRouter::queriesMultiAsyncUtil(void* ctx, int rn, int cn, char * d, int l) { + AsyncReq* req = (AsyncReq*)ctx; + int tmp = 0; + strToNum(d, l, tmp); + if(cn==0) { + req->vec.emplace_back(tmp); + } else { + req->vec.back().setRandomNumber(tmp); + } +} +void TeBkUmLpqAsyncRouter::queriesMultiAsyncCh(void* ctx, bool status, const std::string& q, int counter) { + AsyncReq* req = (AsyncReq*)ctx; + HttpResponse r; + r.setHTTPResponseStatus(HTTPResponseStatus::Ok); + JSONSerialize::serializeObjectCont(&req->vec, wcont_ser, "vector", r.getContentP()); std::string d; - req->r.generateHeadResponse(d, ContentTypes::CONTENT_TYPE_APPLICATION_JSON, (int)c.length()); + r.generateHeadResponse(d, 
ContentTypes::CONTENT_TYPE_APPLICATION_JSON, req->httpVers, req->conn_clos); req->sif->writeDirect(d); - req->sif->writeDirect(c); + req->sif->writeDirect(r.getContent()); req->sif->unUse(); - delete vec; delete req; } +#endif + std::string& TeBkUmLpqAsyncRouter::getUpdQuery(int count) { @@ -202,13 +268,13 @@ std::string& TeBkUmLpqAsyncRouter::getUpdQuery(int count) { } void TeBkUmLpqAsyncRouter::updatesAsyncb(const char* q, int ql, AsyncReq* req) { - req->d = new std::vector; - int queryCount = 0; strToNum(q, ql, queryCount); if(queryCount<1)queryCount=1; else if(queryCount>500)queryCount=500; + req->vec.reserve(queryCount); + LibpqDataSourceImpl* sqli = getDb(); req->sqli = sqli; @@ -227,28 +293,27 @@ void TeBkUmLpqAsyncRouter::updatesAsyncb(const char* q, int ql, AsyncReq* req) { } void TeBkUmLpqAsyncRouter::updatesAsyncbChQ(void* ctx, bool status, const std::string& q, int counter) { AsyncReq* req = (AsyncReq*)ctx; - std::vector* vec = (std::vector*)req->d; LibpqDataSourceImpl* sqli = req->sqli; - int queryCount = (int)vec->size(); + int queryCount = (int)req->vec.size(); std::vector pars; - for (int c = 0; c < queryCount; ++c) { - LibpqDataSourceImpl::ADD_INT4(pars, vec->at(c).getId()); + for(std::vector::iterator it=req->vec.begin(); it != req->vec.end(); ++it) { + LibpqDataSourceImpl::ADD_INT4(pars, (*it).getId()); int newRandomNumber = rand() % 10000 + 1; - if(vec->at(c).getRandomNumber() == newRandomNumber) { + if((*it).getRandomNumber() == newRandomNumber) { newRandomNumber += 1; if(newRandomNumber>=10000) { newRandomNumber = 1; } } LibpqDataSourceImpl::ADD_INT4(pars, newRandomNumber); - vec->at(c).setRandomNumber(newRandomNumber); + (*it).setRandomNumber(newRandomNumber); } - for (int c = 0; c < queryCount; ++c) { - LibpqDataSourceImpl::ADD_INT4(pars, vec->at(c).getId()); + for(std::vector::iterator it=req->vec.begin(); it != req->vec.end(); ++it) { + LibpqDataSourceImpl::ADD_INT4(pars, (*it).getId()); } void* areq = sqli->beginAsync(NULL); @@ -257,9 +322,9 @@ void TeBkUmLpqAsyncRouter::updatesAsyncbChQ(void* ctx, bool status, const std::s AsyncReq* ar = new AsyncReq; ar->sif = req->sif; - ar->r = std::move(req->r); - ar->d = req->d; - req->d = NULL; + ar->httpVers = req->httpVers; + ar->conn_clos = req->conn_clos; + ar->vec = std::move(req->vec); req->sif = NULL; try { @@ -270,27 +335,25 @@ void TeBkUmLpqAsyncRouter::updatesAsyncbChQ(void* ctx, bool status, const std::s } void TeBkUmLpqAsyncRouter::updatesAsyncbChU(void* ctx, bool status, const std::string& q, int counter) { AsyncReq* req = (AsyncReq*)ctx; - std::vector* vec = (std::vector*)req->d; - req->r.setHTTPResponseStatus(HTTPResponseStatus::Ok); - std::string c; - JSONSerialize::serializeUnknown(vec, 100, "std::vector", &c, APP_NAME); + HttpResponse r; + r.setHTTPResponseStatus(HTTPResponseStatus::Ok); + JSONSerialize::serializeObjectCont(&req->vec, wcont_ser, "vector", r.getContentP()); std::string d; - req->r.generateHeadResponse(d, ContentTypes::CONTENT_TYPE_APPLICATION_JSON, (int)c.length()); + r.generateHeadResponse(d, ContentTypes::CONTENT_TYPE_APPLICATION_JSON, req->httpVers, req->conn_clos); req->sif->writeDirect(d); - req->sif->writeDirect(c); + req->sif->writeDirect(r.getContent()); req->sif->unUse(); - delete vec; delete req; } void TeBkUmLpqAsyncRouter::updatesAsync(const char* q, int ql, AsyncReq* req) { - req->d = new std::vector; - int queryCount = 0; strToNum(q, ql, queryCount); if(queryCount<1)queryCount=1; else if(queryCount>500)queryCount=500; + req->vec.reserve(queryCount); + LibpqDataSourceImpl* 
sqli = getDb(); req->sqli = sqli; @@ -309,7 +372,6 @@ void TeBkUmLpqAsyncRouter::updatesAsync(const char* q, int ql, AsyncReq* req) { } void TeBkUmLpqAsyncRouter::updatesAsyncChQ(void* ctx, bool status, const std::string& q, int counter) { AsyncReq* req = (AsyncReq*)ctx; - std::vector* vec = (std::vector*)req->d; std::stringstream ss; //ss << "update world as t set randomnumber = c.randomnumber from (values"; @@ -317,16 +379,15 @@ void TeBkUmLpqAsyncRouter::updatesAsyncChQ(void* ctx, bool status, const std::st LibpqDataSourceImpl* sqli = req->sqli; void* areq = NULL; - int queryCount = (int)vec->size(); - for (int c = 0; c < queryCount; ++c) { + for(std::vector::iterator it=req->vec.begin(); it != req->vec.end(); ++it) { int newRandomNumber = rand() % 10000 + 1; - if(vec->at(c).getRandomNumber() == newRandomNumber) { + if((*it).getRandomNumber() == newRandomNumber) { newRandomNumber += 1; if(newRandomNumber>=10000) { newRandomNumber = 1; } } - vec->at(c).setRandomNumber(newRandomNumber); + (*it).setRandomNumber(newRandomNumber); if(areq==NULL) { areq = sqli->beginAsync(areq); } else { @@ -334,7 +395,7 @@ void TeBkUmLpqAsyncRouter::updatesAsyncChQ(void* ctx, bool status, const std::st } ss.str(std::string()); std::vector pars; - ss << "update world set randomnumber = " << newRandomNumber << " where id = " << vec->at(c).getId(); + ss << "update world set randomnumber = " << newRandomNumber << " where id = " << (*it).getId(); sqli->executeUpdateQueryAsync(ss.str(), std::move(pars), NULL, NULL, areq, false); sqli->commitAsync(areq); /*if(c!=queryCount-1) { @@ -345,9 +406,9 @@ void TeBkUmLpqAsyncRouter::updatesAsyncChQ(void* ctx, bool status, const std::st AsyncReq* ar = new AsyncReq; ar->sif = req->sif; - ar->r = std::move(req->r); - ar->d = req->d; - req->d = NULL; + ar->httpVers = req->httpVers; + ar->conn_clos = req->conn_clos; + ar->vec = std::move(req->vec); req->sif = NULL; try { @@ -358,16 +419,14 @@ void TeBkUmLpqAsyncRouter::updatesAsyncChQ(void* ctx, bool status, const std::st } void TeBkUmLpqAsyncRouter::updatesAsyncChU(void* ctx, bool status, const std::string& q, int counter) { AsyncReq* req = (AsyncReq*)ctx; - std::vector* vec = (std::vector*)req->d; - req->r.setHTTPResponseStatus(HTTPResponseStatus::Ok); - std::string c; - JSONSerialize::serializeUnknown(vec, 100, "std::vector", &c, APP_NAME); + HttpResponse r; + r.setHTTPResponseStatus(HTTPResponseStatus::Ok); + JSONSerialize::serializeObjectCont(&req->vec, wcont_ser, "vector", r.getContentP()); std::string d; - req->r.generateHeadResponse(d, ContentTypes::CONTENT_TYPE_APPLICATION_JSON, (int)c.length()); + r.generateHeadResponse(d, ContentTypes::CONTENT_TYPE_APPLICATION_JSON, req->httpVers, req->conn_clos); req->sif->writeDirect(d); - req->sif->writeDirect(c); + req->sif->writeDirect(r.getContent()); req->sif->unUse(); - delete vec; delete req; } @@ -375,7 +434,6 @@ void TeBkUmLpqAsyncRouter::updateCache() { LibpqDataSourceImpl* sqli = getDb(); CacheReq* req = new CacheReq; - req->d = new std::vector; req->cchi = CacheManager::getImpl(); try { @@ -388,29 +446,22 @@ void TeBkUmLpqAsyncRouter::updateCache() { } void TeBkUmLpqAsyncRouter::updateCacheAsyncUtil(void* ctx, int rn, std::vector& data) { CacheReq* req = (CacheReq*)ctx; - std::vector* wlist = (std::vector*)req->d; - TeBkUmLpqAsyncWorld w; - w.setId(ntohl(*((uint32_t *) data.at(0).d))); - w.setRandomNumber(ntohl(*((uint32_t *) data.at(1).d))); - wlist->push_back(w); + req->vec.emplace_back(ntohl(*((uint32_t *) data.at(0).d)), ntohl(*((uint32_t *) data.at(1).d))); } void 
TeBkUmLpqAsyncRouter::updateCacheAsyncCh(void* ctx, bool status, const std::string& q, int counter) { CacheReq* req = (CacheReq*)ctx; - std::vector* wlist = (std::vector*)req->d; CacheInterface* cchi = req->cchi; try { - for (int c = 0; c < (int)wlist->size(); ++c) { - TeBkUmLpqAsyncWorld& w = wlist->at(c); + for(std::vector::iterator it=req->vec.begin(); it != req->vec.end(); ++it) { char str[12]; - sprintf(str, "%d;%d", w.getId(), w.getRandomNumber()); - cchi->setRaw(CastUtil::fromNumber(w.getId()), str); + sprintf(str, "%d;%d", (*it).getId(), (*it).getRandomNumber()); + cchi->setRaw(CastUtil::fromNumber((*it).getId()), str); } - delete wlist; CacheManager::cleanImpl(cchi); delete req; + CacheManager::triggerAppInitCompletion("te-benchmark-um-pq-async"); } catch(const std::exception& e) { - delete wlist; CacheManager::cleanImpl(cchi); delete req; throw e; @@ -423,22 +474,21 @@ void TeBkUmLpqAsyncRouter::cachedWorlds(const char* q, int ql, std::vector500)queryCount=500; + wlst.reserve(queryCount); + CacheInterface* cchi = CacheManager::getImpl(); try { std::vector keys; for (int c = 0; c < queryCount; ++c) { int rid = rand() % 10000 + 1; - TeBkUmLpqAsyncWorld w; std::string v = cchi->getValue(CastUtil::fromNumber(rid)); size_t fn = v.find(";"); int tmp = 0; strToNum(v.substr(0, fn).c_str(), fn, tmp); - w.setId(tmp); - tmp = 0; - strToNum(v.substr(fn+1).c_str(), v.length()-fn-1, tmp); - w.setRandomNumber(tmp); - wlst.push_back(w); + int tmp1 = 0; + strToNum(v.substr(fn+1).c_str(), v.length()-fn-1, tmp1); + wlst.emplace_back(tmp, tmp1); } CacheManager::cleanImpl(cchi); } catch(const std::exception& e) { @@ -449,8 +499,6 @@ void TeBkUmLpqAsyncRouter::cachedWorlds(const char* q, int ql, std::vectord = new std::vector; - LibpqDataSourceImpl* sqli = getDb(); try { @@ -463,50 +511,40 @@ void TeBkUmLpqAsyncRouter::getContextAsync(AsyncReq* req) { } void TeBkUmLpqAsyncRouter::getContextAsyncUtil(void* ctx, int rn, int cn, char * d, int l) { AsyncReq* req = (AsyncReq*)ctx; - std::vector* flst = (std::vector*)req->d; if(cn==0) { - flst->push_back(TeBkUmLpqAsyncFortune()); - } - TeBkUmLpqAsyncFortune& w = flst->at(flst->size()-1); - if(cn==0)w.setId(ntohl(*((uint32_t *) d))); - else { - std::string nm = std::string(d, l); - CryptoHandler::sanitizeHtml(nm); - w.setMessage(nm); + req->flst.emplace_back(ntohl(*((uint32_t *) d))); + } else { + TeBkUmLpqAsyncFortune& w = req->flst.back(); + w.message = CryptoHandler::sanitizeHtmlFast((const uint8_t *)d, (size_t)l, w.message_i, w.allocd); } } + void TeBkUmLpqAsyncRouter::getContextAsyncCh(void* ctx, bool status, const std::string& q, int counter) { AsyncReq* req = (AsyncReq*)ctx; - std::vector* flst = (std::vector*)req->d; - Context context; - TeBkUmLpqAsyncFortune nf; - nf.setId(0); - nf.setMessage("Additional fortune added at request time."); - flst->push_back(nf); - std::sort (flst->begin(), flst->end()); + req->flst.emplace_back(0, "Additional fortune added at request time."); + req->flst.sort(); - context.insert(std::pair("fortunes", flst)); + context.insert(std::pair("fortunes", &req->flst)); - void* mkr = dlsym(req->ddlib, TPE_FN_NAME.c_str()); - if(mkr!=NULL) + if(tmplFunc!=NULL) { - TeBkUmLpqAsyncTemplatePtr f = (TeBkUmLpqAsyncTemplatePtr)mkr; - std::string msg; - f(&context, msg); - req->r.setContent(msg); - req->r.setHTTPResponseStatus(HTTPResponseStatus::Ok); + fcpstream str; + tmplFunc(&context, str); + HttpResponse r; + r.setHTTPResponseStatus(HTTPResponseStatus::Ok); std::string d; - req->r.generateHeadResponse(d, 
ContentTypes::CONTENT_TYPE_TEXT_SHTML, (int)msg.length()); + r.generateHeadResponse(d, ContentTypes::CONTENT_TYPE_TEXT_SHTML, req->httpVers, req->conn_clos, (int)str.str().length()); req->sif->writeDirect(d); - req->sif->writeDirect(msg); + req->sif->writeDirect(str.str()); req->sif->unUse(); } else { ResponseData d; - req->r.generateHeadResponse(d._b); + HttpResponse r; + r.generateHeadResponse(d._b, req->httpVers, req->conn_clos); req->sif->writeTo(&d); req->sif->unUse(); } @@ -523,71 +561,88 @@ bool TeBkUmLpqAsyncRouter::strToNum(const char* str, int len, int& ret) { return true; } -bool TeBkUmLpqAsyncRouter::route(HttpRequest* req, HttpResponse* res, void* dlib, void* ddlib, SocketInterface* sif) { - std::string_view path = req->getPath(); +bool TeBkUmLpqAsyncRouter::route(HttpRequest* req, HttpResponse* res, SocketInterface* sif) { sif->use(); - if(StringUtil::endsWith(path, "/plaintext")) { + if(StringUtil::endsWith(req->getPath(), "/plaintext")) { res->setHTTPResponseStatus(HTTPResponseStatus::Ok); std::string d; res->generateHeadResponse(d, ContentTypes::CONTENT_TYPE_TEXT_PLAIN, (int)HELLO_WORLD.length()); sif->writeDirect(d); sif->writeDirect(HELLO_WORLD); sif->unUse(); - } else if(StringUtil::endsWith(path, "/json")) { + } else if(StringUtil::endsWith(req->getPath(), "/json")) { TeBkUmLpqAsyncMessage msg; msg.setMessage(HELLO_WORLD); res->setHTTPResponseStatus(HTTPResponseStatus::Ok); - std::string c; - JSONSerialize::serializeUnknown(&msg, 0, "TeBkUmLpqAsyncMessage", &c, APP_NAME); + JSONSerialize::serializeObject(&msg, m_ser, res->getContentP()); std::string d; - res->generateHeadResponse(d, ContentTypes::CONTENT_TYPE_APPLICATION_JSON, (int)c.length()); + res->generateHeadResponse(d, ContentTypes::CONTENT_TYPE_APPLICATION_JSON); sif->writeDirect(d); - sif->writeDirect(c); + sif->writeDirect(res->getContent()); sif->unUse(); - } else if(StringUtil::endsWith(path, "/db")) { + } else if(StringUtil::endsWith(req->getPath(), "/db")) { AsyncReq* ar = new AsyncReq; ar->sif = sif; - ar->r.update(req); + ar->httpVers = req->getHttpVers(); + ar->conn_clos = req->isClose(); dbAsync(ar); - } else if(StringUtil::endsWith(path, "/queries")) { + } else if(StringUtil::endsWith(req->getPath(), "/queries")) { struct yuarel_param params[1]; yuarel_parse_query((char*)req->getQueryStr().data(), req->getQueryStr().size(), params, 1); AsyncReq* ar = new AsyncReq; ar->sif = sif; - ar->r.update(req); + ar->httpVers = req->getHttpVers(); + ar->conn_clos = req->isClose(); queriesAsync(params[0].val, params[0].val_len, ar); - } else if(StringUtil::endsWith(path, "/fortunes")) { + } +#ifndef HAVE_LIBPQ_BATCH + else if(StringUtil::endsWith(req->getPath(), "/queriem")) { + struct yuarel_param params[1]; + yuarel_parse_query((char*)req->getQueryStr().data(), req->getQueryStr().size(), params, 1); + AsyncReq* ar = new AsyncReq; + ar->sif = sif; + ar->sif = sif; + ar->httpVers = req->getHttpVers(); + ar->conn_clos = req->isClose(); + queriesMultiAsync(params[0].val, params[0].val_len, ar); + } +#endif + else if(StringUtil::endsWith(req->getPath(), "/fortunes")) { AsyncReq* ar = new AsyncReq; ar->sif = sif; - ar->ddlib = ddlib; - ar->r.update(req); + ar->sif = sif; + ar->httpVers = req->getHttpVers(); + ar->conn_clos = req->isClose(); getContextAsync(ar); - } else if(StringUtil::endsWith(path, "/bupdates")) { + } else if(StringUtil::endsWith(req->getPath(), "/bupdates")) { struct yuarel_param params[1]; yuarel_parse_query((char*)req->getQueryStr().data(), req->getQueryStr().size(), params, 1); AsyncReq* ar = new 
AsyncReq; ar->sif = sif; - ar->r.update(req); + ar->sif = sif; + ar->httpVers = req->getHttpVers(); + ar->conn_clos = req->isClose(); updatesAsyncb(params[0].val, params[0].val_len, ar); - } else if(StringUtil::endsWith(path, "/updates")) { + } else if(StringUtil::endsWith(req->getPath(), "/updates")) { struct yuarel_param params[1]; yuarel_parse_query((char*)req->getQueryStr().data(), req->getQueryStr().size(), params, 1); AsyncReq* ar = new AsyncReq; ar->sif = sif; - ar->r.update(req); + ar->sif = sif; + ar->httpVers = req->getHttpVers(); + ar->conn_clos = req->isClose(); updatesAsync(params[0].val, params[0].val_len, ar); - } else if(StringUtil::endsWith(path, "/cached-worlds")) { + } else if(StringUtil::endsWith(req->getPath(), "/cached-worlds")) { struct yuarel_param params[1]; yuarel_parse_query((char*)req->getQueryStr().data(), req->getQueryStr().size(), params, 1); std::vector msg; cachedWorlds(params[0].val, params[0].val_len, msg); res->setHTTPResponseStatus(HTTPResponseStatus::Ok); - std::string c; - JSONSerialize::serializeUnknown(&msg, 100, "std::vector", &c, APP_NAME); + JSONSerialize::serializeObjectCont(&msg, wcont_ser, "vector", res->getContentP()); std::string d; - res->generateHeadResponse(d, ContentTypes::CONTENT_TYPE_APPLICATION_JSON, (int)c.length()); + res->generateHeadResponse(d, ContentTypes::CONTENT_TYPE_APPLICATION_JSON); sif->writeDirect(d); - sif->writeDirect(c); + sif->writeDirect(res->getContent()); sif->unUse(); } else { res->setHTTPResponseStatus(HTTPResponseStatus::NotFound); @@ -599,15 +654,17 @@ bool TeBkUmLpqAsyncRouter::route(HttpRequest* req, HttpResponse* res, void* dlib return false; } -std::string TeBkUmLpqAsyncRouter::APP_NAME = ""; -std::string TeBkUmLpqAsyncRouter::TPE_FN_NAME = ""; +TemplatePtr TeBkUmLpqAsyncRouter::tmplFunc; +Ser TeBkUmLpqAsyncRouter::m_ser; +Ser TeBkUmLpqAsyncRouter::w_ser; +SerCont TeBkUmLpqAsyncRouter::wcont_ser; TeBkUmLpqAsyncRouter::TeBkUmLpqAsyncRouter() { sqli = NULL; - if(APP_NAME=="") { - APP_NAME = CommonUtils::normalizeAppName("te-benchmark-um-pq-async"); - TPE_FN_NAME = CommonUtils::getTpeFnName("tpe/fortunes.tpe", "te-benchmark-um-pq-async"); - } + tmplFunc = TemplateUtil::getTemplateFunc("te-benchmark-um-pq-async", "tpe/fortunes.tpe"); + m_ser = Serializer::getSerFuncForObject("te-benchmark-um-pq-async", "TeBkUmLpqAsyncMessage"); + w_ser = Serializer::getSerFuncForObject("te-benchmark-um-pq-async", "TeBkUmLpqAsyncWorld"); + wcont_ser = Serializer::getSerFuncForObjectCont("te-benchmark-um-pq-async", "TeBkUmLpqAsyncWorld", "std::vector"); } TeBkUmLpqAsyncRouter::~TeBkUmLpqAsyncRouter() { diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/src/autotools/Makefile.am b/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/src/autotools/Makefile.am index 31ef35ee381..ef7f63e9d0d 100644 --- a/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/src/autotools/Makefile.am +++ b/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/src/autotools/Makefile.am @@ -8,7 +8,7 @@ distdir=${PACKAGE_NAME}-${PACKAGE_VERSION}-src fprefix=../../../../${packageIdentifier} prefix=${abs_builddir} -lib_LTLIBRARIES = libte_benchmark_um_pq_async.la +lib_LTLIBRARIES = libte-benchmark-um-pq-async.la libte_benchmark_um_pq_async_la_SOURCES = ../TeBkUmLpqAsync.cpp libte_benchmark_um_pq_async_la_LDFLAGS = -no-undefined diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/tpe/fortunes.tpe b/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/tpe/fortunes.tpe index d797bfd842a..e27f925a123 100644 --- 
a/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/tpe/fortunes.tpe +++ b/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/tpe/fortunes.tpe @@ -1,12 +1,12 @@ -#declare std::vector fortunes# +#declareref std::list* fortunes# Fortunes -#for(int i=0;i<(int)fortunes.size();i++)# - +#for(std::list::iterator it=fortunes->begin(); it != fortunes->end(); ++it)# + #rof#
 			<tr><th>id</th><th>message</th></tr>
-			<tr><td>$_S{fortunes.at(i).getId()}</td><td>${fortunes.at(i).getMessage()}</td></tr>
+			<tr><td>${(*it).getId()}</td><td>${(*it).message}</td></tr>
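[Editor's note, not part of the patch] The te-benchmark-um-pq-async changes above restructure ownership in the callbacks: instead of parking results behind a void* (req->d) that every continuation had to cast and delete, AsyncReq now owns the World vector and Fortune list by value, rows are emplace_back'd directly, and the payload is std::move'd into the follow-up request. A small, runnable sketch of that technique under simplified, hypothetical types (World, AsyncReq, onRow, nextStage):

// Illustrative sketch only: value-owned async context handed off by move.
#include <cstdio>
#include <utility>
#include <vector>

struct World { int id = 0; int randomNumber = 0; };

struct AsyncReq {
    std::vector<World> vec;   // owned by value; no cast, no separate delete
};

// Row callback: columns arrive one by one, exactly one emplace per row.
static void onRow(AsyncReq* req, int cn, int value) {
    if (cn == 0) req->vec.emplace_back(World{value, 0});
    else         req->vec.back().randomNumber = value;
}

// Stage hand-off: move the payload forward, never copy or re-wrap it.
static AsyncReq* nextStage(AsyncReq* req) {
    AsyncReq* ar = new AsyncReq;
    ar->vec = std::move(req->vec);
    delete req;
    return ar;
}

int main() {
    AsyncReq* req = new AsyncReq;
    onRow(req, 0, 42); onRow(req, 1, 7);
    req = nextStage(req);
    std::printf("%zu rows, first id=%d\n", req->vec.size(), req->vec[0].id);
    delete req;
}

The final callback then only serializes and deletes the request itself, which is why the "delete vec;" / "delete w;" lines disappear throughout the hunks above.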
diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/xmake.lua b/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/xmake.lua new file mode 100644 index 00000000000..c68a2fc4e46 --- /dev/null +++ b/frameworks/C++/ffead-cpp/te-benchmark-um-pq-async/xmake.lua @@ -0,0 +1,12 @@ +add_includedirs("include/") + +local bindir = "$(projectdir)/ffead-cpp-6.0-bin" + +target("te-benchmark-um-pq-async") + set_languages("c++17") + add_deps("ffead-framework") + add_options(getOptions()) + set_kind("shared") + on_load(setIncludes) + add_files("src/*.cpp") + set_installdir(bindir) diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um-pq/CMakeLists.txt b/frameworks/C++/ffead-cpp/te-benchmark-um-pq/CMakeLists.txt index b7f5ed95fd6..bd4180550a3 100644 --- a/frameworks/C++/ffead-cpp/te-benchmark-um-pq/CMakeLists.txt +++ b/frameworks/C++/ffead-cpp/te-benchmark-um-pq/CMakeLists.txt @@ -5,6 +5,10 @@ file(GLOB sources ) include_directories("${CMAKE_SOURCE_DIR}/web/te-benchmark-um-pq/include") -add_library(te_benchmark_um_pq SHARED ${sources}) -set_property(TARGET te_benchmark_um_pq PROPERTY POSITION_INDEPENDENT_CODE ON) -target_link_libraries(te_benchmark_um_pq ffead-modules ffead-framework ${HAVE_PQLIB} ${HAVE_CURLLIB} ${HAVE_SSLLIB} ${HAVE_MEMCACHEDLIB} ${HAVE_ODBCLIB} ${HAVE_MONGOCLIB} ${HAVE_BSONLIB} ${HAVE_ZLIB} ${HAVE_CRYPTOLIB}) +if(BUILD_STATIC_LIBS OR EMSCRIPTEN) + add_library(te-benchmark-um-pq STATIC ${sources}) +else() + add_library(te-benchmark-um-pq ${sources}) +endif() +set_property(TARGET te-benchmark-um-pq PROPERTY POSITION_INDEPENDENT_CODE ON) +target_link_libraries(te-benchmark-um-pq ffead-modules ffead-framework ${HAVE_PQLIB} ${HAVE_CURLLIB} ${HAVE_SSLLIB} ${HAVE_MEMCACHEDLIB} ${HAVE_ODBCLIB} ${HAVE_MONGOCLIB} ${HAVE_BSONLIB} ${HAVE_ZLIB} ${HAVE_CRYPTOLIB}) diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um-pq/config/application.xml b/frameworks/C++/ffead-cpp/te-benchmark-um-pq/config/application.xml index 66aa829019d..e2f2b918aed 100644 --- a/frameworks/C++/ffead-cpp/te-benchmark-um-pq/config/application.xml +++ b/frameworks/C++/ffead-cpp/te-benchmark-um-pq/config/application.xml @@ -1,4 +1,4 @@ - + * GET, POST, HEAD, PUT, DELETE diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um-pq/config/cache.xml b/frameworks/C++/ffead-cpp/te-benchmark-um-pq/config/cache.xml deleted file mode 100644 index b9fefe3abb2..00000000000 --- a/frameworks/C++/ffead-cpp/te-benchmark-um-pq/config/cache.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - - - sumeet - sumeet - - - TeBkUmLpqRouter.updateCache - Memory-Cached - memory - - \ No newline at end of file diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um-pq/include/TeBkUmLpq.h b/frameworks/C++/ffead-cpp/te-benchmark-um-pq/include/TeBkUmLpq.h index e0ee05aad1f..706208c2d7e 100644 --- a/frameworks/C++/ffead-cpp/te-benchmark-um-pq/include/TeBkUmLpq.h +++ b/frameworks/C++/ffead-cpp/te-benchmark-um-pq/include/TeBkUmLpq.h @@ -24,6 +24,7 @@ #define WEB_TE_BENCHMARK_UM_INCLUDE_TeBkUmLpq_H_ #include "TemplateHandler.h" #include "vector" +#include "list" #ifndef OS_MINGW #include #include @@ -32,7 +33,6 @@ #include #include #include "CryptoHandler.h" -#include "vector" #include "CastUtil.h" #include #include "CacheManager.h" @@ -42,13 +42,14 @@ #include "string" #include "yuarel.h" #include "Router.h" - -typedef void (*TeBkUmLpqTemplatePtr) (Context*, std::string&); +#include "Reflector.h" class TeBkUmLpqWorld { int id; int randomNumber; public: + TeBkUmLpqWorld(int id); + TeBkUmLpqWorld(int id, int randomNumber); TeBkUmLpqWorld(); virtual ~TeBkUmLpqWorld(); int 
getId() const; @@ -66,20 +67,24 @@ struct UpdQrData { class TeBkUmLpqFortune { int id; - std::string message; public: + std::string message_i; + std::string_view message; + bool allocd; + TeBkUmLpqFortune(int id); + TeBkUmLpqFortune(int id, std::string message); TeBkUmLpqFortune(); virtual ~TeBkUmLpqFortune(); int getId() const; void setId(int id); - const std::string& getMessage() const; - void setMessage(const std::string& message); bool operator < (const TeBkUmLpqFortune& other) const; }; class TeBkUmLpqMessage { std::string message; public: + TeBkUmLpqMessage(); + TeBkUmLpqMessage(std::string message); virtual ~TeBkUmLpqMessage(); const std::string& getMessage() const; void setMessage(const std::string& message); @@ -92,8 +97,11 @@ class TeBkUmLpqRouter : public Router { static std::string WORLD_ALL_QUERY; static std::string FORTUNE_ALL_QUERY; - static std::string APP_NAME; - static std::string TPE_FN_NAME; + static TemplatePtr tmplFunc; + + static Ser m_ser; + static Ser w_ser; + static SerCont wcont_ser; static bool strToNum(const char* str, int len, int& ret); @@ -112,7 +120,7 @@ class TeBkUmLpqRouter : public Router { void cachedWorlds(const char*, int, std::vector&); static void updateCacheUtil(void* ctx, int rn, std::vector& data); - void getContext(HttpRequest* request, Context* context); + void handleTemplate(HttpResponse* res); static void getContextUtil(void* ctx, int, int, char *, int); std::map _qC; @@ -124,7 +132,7 @@ class TeBkUmLpqRouter : public Router { TeBkUmLpqRouter(); virtual ~TeBkUmLpqRouter(); void updateCache(); - bool route(HttpRequest* req, HttpResponse* res, void* dlib, void* ddlib, SocketInterface* sif); + bool route(HttpRequest* req, HttpResponse* res, SocketInterface* sif); }; #endif /* WEB_TE_BENCHMARK_UM_INCLUDE_TeBkUmLpq_H_ */ diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um-pq/meson.build b/frameworks/C++/ffead-cpp/te-benchmark-um-pq/meson.build new file mode 100644 index 00000000000..7466f03cd69 --- /dev/null +++ b/frameworks/C++/ffead-cpp/te-benchmark-um-pq/meson.build @@ -0,0 +1,15 @@ + +module_includes = ['/usr/local/include','../../src/modules/common','../../src/modules/cache','../../src/modules/cache/memory','../../src/modules/cache/redis', + '../../src/modules/cache/memcached','../../src/modules/client-util','../../src/modules/http','../../src/modules/http/http11','../../src/modules/http/http20', + '../../src/modules/ioc','../../src/modules/jobs','../../src/modules/reflection','../../src/modules/sdorm','../../src/modules/sdorm/sql','../../src/modules/sdorm/sql/libpq', + '../../src/modules/client-util/ssl','../../src/modules/sdorm/mongo','../../src/modules/sdorm/mongo/raw','../../src/modules/search','../../src/modules/serialization', + '../../src/modules/serialization/xml','../../src/modules/serialization/json','../../src/modules/server-util','../../src/modules/ssl','../../src/modules/threads', + '../../src/framework','include'] +module_includes += global_includes +module_libs = [global_libs] + +c = run_command(meson_grabber, 'src/') +module_sources = c.stdout().strip().split('\n') + +shared_library('te-benchmark-um-pq', module_sources, include_directories: module_includes, dependencies: module_libs, link_with: [libffeadmodules, libffeadframework], + install: true, install_dir: bin_dir+'/lib') \ No newline at end of file diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um-pq/src/TeBkUmLpq.cpp b/frameworks/C++/ffead-cpp/te-benchmark-um-pq/src/TeBkUmLpq.cpp index 509fa4015f6..51b957346bc 100644 --- 
a/frameworks/C++/ffead-cpp/te-benchmark-um-pq/src/TeBkUmLpq.cpp +++ b/frameworks/C++/ffead-cpp/te-benchmark-um-pq/src/TeBkUmLpq.cpp @@ -37,6 +37,16 @@ void TeBkUmLpqWorld::setRandomNumber(int randomNumber) { this->randomNumber = randomNumber; } +TeBkUmLpqWorld::TeBkUmLpqWorld(int id) { + this->id = id; + randomNumber = 0; +} + +TeBkUmLpqWorld::TeBkUmLpqWorld(int id, int randomNumber) { + this->id = id; + this->randomNumber = randomNumber; +} + TeBkUmLpqWorld::TeBkUmLpqWorld() { id = 0; randomNumber = 0; @@ -53,25 +63,40 @@ void TeBkUmLpqFortune::setId(int id) { this->id = id; } -const std::string& TeBkUmLpqFortune::getMessage() const { - return message; +TeBkUmLpqFortune::TeBkUmLpqFortune(int id) { + this->id = id; + allocd = false; } -void TeBkUmLpqFortune::setMessage(const std::string& message) { - this->message = message; +TeBkUmLpqFortune::TeBkUmLpqFortune(int id, std::string message) { + this->id = id; + this->message_i = message; + this->message = std::string_view(this->message_i); + allocd = false; } TeBkUmLpqFortune::TeBkUmLpqFortune() { id = 0; + allocd = false; } TeBkUmLpqFortune::~TeBkUmLpqFortune() { + if(allocd && message.size()>0) { + free((void *)message.data()); + } } bool TeBkUmLpqFortune::operator < (const TeBkUmLpqFortune& other) const { return message.compare(other.message)<0; } +TeBkUmLpqMessage::TeBkUmLpqMessage() { +} + +TeBkUmLpqMessage::TeBkUmLpqMessage(std::string message) { + this->message = message; +} + TeBkUmLpqMessage::~TeBkUmLpqMessage() { } @@ -93,9 +118,9 @@ void TeBkUmLpqRouter::db(TeBkUmLpqWorld& w) { LibpqDataSourceImpl* sqli = getDb(); int rid = rand() % 10000 + 1; try { - std::vector pars; - LibpqDataSourceImpl::ADD_INT4(pars, rid); - sqli->executeQuery(WORLD_ONE_QUERY, pars, &w, &TeBkUmLpqRouter::dbUtil); + LibpqParams<1> pars; + pars.int4(rid); + sqli->executeQuery(WORLD_ONE_QUERY, &pars, &w, &TeBkUmLpqRouter::dbUtil); } catch(const std::exception& e) { throw e; } @@ -106,26 +131,28 @@ void TeBkUmLpqRouter::dbUtil(void* ctx, int rn, int cn, char * d) { if(cn==1)w->setRandomNumber(ntohl(*((uint32_t *) d))); } - void TeBkUmLpqRouter::queries(const char* q, int ql, std::vector& wlst) { int queryCount = 0; strToNum(q, ql, queryCount); if(queryCount<1)queryCount=1; else if(queryCount>500)queryCount=500; + wlst.reserve(queryCount); + LibpqDataSourceImpl* sqli = getDb(); + LibpqParamsBase* pars = sqli->getParams(queryCount); try { - TeBkUmLpqWorld w; - std::vector pars; for (int c = 0; c < queryCount; ++c) { int rid = rand() % 10000 + 1; - pars.clear(); - LibpqDataSourceImpl::ADD_INT4(pars, rid); + pars->int4(rid); + wlst.emplace_back(); + TeBkUmLpqWorld& w = wlst.back(); sqli->executeQuery(WORLD_ONE_QUERY, pars, &w, &TeBkUmLpqRouter::dbUtil); - wlst.push_back(w); } + delete pars; } catch(const std::exception& e) { + delete pars; throw e; } } @@ -137,6 +164,8 @@ void TeBkUmLpqRouter::queriesMulti(const char* q, int ql, std::vector500)queryCount=500; + wlst.reserve(queryCount); + LibpqDataSourceImpl* sqli = getDb(); UpdQrData updt; @@ -161,14 +190,13 @@ void TeBkUmLpqRouter::queriesMulti(const char* q, int ql, std::vector* wlst = (std::vector*)ctx; - if(cn==0) { - wlst->push_back(TeBkUmLpqWorld()); - } - TeBkUmLpqWorld& w = wlst->at(wlst->size()-1); int tmp = 0; strToNum(d, l, tmp); - if(cn==0)w.setId(tmp); - if(cn==1)w.setRandomNumber(tmp); + if(cn==0) { + wlst->emplace_back(tmp); + } else { + wlst->back().setRandomNumber(tmp); + } } std::string& TeBkUmLpqRouter::getUpdQuery(int count) { @@ -204,23 +232,23 @@ void TeBkUmLpqRouter::updates(const char* q, 
int ql, std::vector if(queryCount<1)queryCount=1; else if(queryCount>500)queryCount=500; + wlst.reserve(queryCount); + LibpqDataSourceImpl* sqli = getDb(); + LibpqParamsBase* pars = sqli->getParams(queryCount*3); try { - std::vector pars; - std::vector qp; - std::string& query = getUpdQuery(queryCount); for (int c = 0; c < queryCount; ++c) { int rid = rand() % 10000 + 1; - qp.clear(); - LibpqDataSourceImpl::ADD_INT4(qp, rid); - TeBkUmLpqWorld w; - sqli->executeQuery(WORLD_ONE_QUERY, qp, &w, &TeBkUmLpqRouter::dbUtil); - wlst.push_back(w); + LibpqParams<1> qp; + qp.int4(rid); + wlst.emplace_back(); + TeBkUmLpqWorld& w = wlst.back(); + sqli->executeQuery(WORLD_ONE_QUERY, &qp, &w, &TeBkUmLpqRouter::dbUtil); - LibpqDataSourceImpl::ADD_INT4(pars, w.getId()); + pars->int4(w.getId()); int newRandomNumber = rand() % 10000 + 1; if(w.getRandomNumber() == newRandomNumber) { @@ -229,17 +257,19 @@ void TeBkUmLpqRouter::updates(const char* q, int ql, std::vector newRandomNumber = 1; } } - LibpqDataSourceImpl::ADD_INT4(pars, newRandomNumber); + pars->int4(newRandomNumber); w.setRandomNumber(newRandomNumber); } - for (int c = 0; c < queryCount; ++c) { - LibpqDataSourceImpl::ADD_INT4(pars, wlst.at(c).getId()); + for(std::vector::iterator it=wlst.begin(); it != wlst.end(); ++it) { + pars->int4((*it).getId()); } sqli->begin(); sqli->executeUpdateQuery(query, pars); sqli->commit(); + delete pars; } catch(const std::exception& e) { + delete pars; sqli->rollback(); throw e; } @@ -251,6 +281,8 @@ void TeBkUmLpqRouter::updatesMulti(const char* q, int ql, std::vector500)queryCount=500; + wlst.reserve(queryCount); + LibpqDataSourceImpl* sqli = getDb(); try { @@ -288,14 +320,12 @@ void TeBkUmLpqRouter::updatesMulti(const char* q, int ql, std::vectorss; - if(cn==0) { - updt->wlist->push_back(TeBkUmLpqWorld()); - } - TeBkUmLpqWorld& w = updt->wlist->at(updt->wlist->size()-1); int tmp = 0; strToNum(d, l, tmp); - if(cn==0)w.setId(tmp); - else { + if(cn==0) { + updt->wlist->emplace_back(tmp); + } else { + TeBkUmLpqWorld& w = updt->wlist->back(); int newRandomNumber = rand() % 10000 + 1; if(tmp == newRandomNumber) { newRandomNumber += 1; @@ -320,16 +350,15 @@ void TeBkUmLpqRouter::updateCache() { try { std::vector wlist; - std::vector pars; - sqli->executeQuery(WORLD_ALL_QUERY, pars, &wlist, &TeBkUmLpqRouter::updateCacheUtil); + sqli->executeQuery(WORLD_ALL_QUERY, NULL, &wlist, &TeBkUmLpqRouter::updateCacheUtil); - for (int c = 0; c < (int)wlist.size(); ++c) { - TeBkUmLpqWorld& w = wlist.at(c); + for(std::vector::iterator it=wlist.begin(); it != wlist.end(); ++it) { char str[12]; - sprintf(str, "%d;%d", w.getId(), w.getRandomNumber()); - cchi->setRaw(CastUtil::fromNumber(w.getId()), str); + sprintf(str, "%d;%d", (*it).getId(), (*it).getRandomNumber()); + cchi->setRaw(CastUtil::fromNumber((*it).getId()), str); } CacheManager::cleanImpl(cchi); + CacheManager::triggerAppInitCompletion(); } catch(const std::exception& e) { CacheManager::cleanImpl(cchi); throw e; @@ -337,10 +366,7 @@ void TeBkUmLpqRouter::updateCache() { } void TeBkUmLpqRouter::updateCacheUtil(void* ctx, int rn, std::vector& data) { std::vector* wlist = (std::vector*)ctx; - TeBkUmLpqWorld w; - w.setId(ntohl(*((uint32_t *) data.at(0).d))); - w.setRandomNumber(ntohl(*((uint32_t *) data.at(1).d))); - wlist->push_back(w); + wlist->emplace_back(ntohl(*((uint32_t *) data.at(0).d)), ntohl(*((uint32_t *) data.at(1).d))); } void TeBkUmLpqRouter::cachedWorlds(const char* q, int ql, std::vector& wlst) { @@ -349,25 +375,25 @@ void TeBkUmLpqRouter::cachedWorlds(const char* 
q, int ql, std::vector500)queryCount=500; + wlst.reserve(queryCount); + CacheInterface* cchi = CacheManager::getImpl(); try { std::vector keys, values; for (int c = 0; c < queryCount; ++c) { int rid = rand() % 10000 + 1; - keys.push_back(CastUtil::fromNumber(rid)); + keys.emplace_back(CastUtil::fromNumber(rid)); } cchi->mgetRaw(keys, values); for (int c = 0; c < queryCount; ++c) { - TeBkUmLpqWorld w; std::string& v = values.at(c); size_t fn = v.find(";"); int tmp = 0; strToNum(v.substr(0, fn).c_str(), fn, tmp); - w.setId(tmp); - strToNum(v.substr(fn+1).c_str(), v.length()-fn-1, tmp); - w.setRandomNumber(tmp); - wlst.push_back(w); + int tmp1 = 0; + strToNum(v.substr(fn+1).c_str(), v.length()-fn-1, tmp1); + wlst.emplace_back(tmp, tmp1); } CacheManager::cleanImpl(cchi); } catch(const std::exception& e) { @@ -376,40 +402,38 @@ void TeBkUmLpqRouter::cachedWorlds(const char* q, int ql, std::vector* flst = new std::vector; - std::vector pars; - sqli->executeQuery(FORTUNE_ALL_QUERY, pars, flst, &TeBkUmLpqRouter::getContextUtil); + Context ctx; + std::list flst; + sqli->executeQuery(FORTUNE_ALL_QUERY, NULL, &flst, &TeBkUmLpqRouter::getContextUtil); - TeBkUmLpqFortune nf; - nf.setId(0); - nf.setMessage("Additional fortune added at request time."); - flst->push_back(nf); - std::sort (flst->begin(), flst->end()); + flst.emplace_back(0, "Additional fortune added at request time."); + flst.sort(); - context->insert(std::pair("fortunes", flst)); + ctx.insert(std::pair("fortunes", &flst)); + + fcpstream str; + tmplFunc(&ctx, str); + res->setContent(str.str()); + res->setContentType(ContentTypes::CONTENT_TYPE_TEXT_SHTML); + res->setHTTPResponseStatus(HTTPResponseStatus::Ok); } catch(...) { throw; } } void TeBkUmLpqRouter::getContextUtil(void* ctx, int rn, int cn, char * d, int l) { - std::vector* flst = (std::vector*)ctx; + std::list* flst = (std::list*)ctx; if(cn==0) { - flst->push_back(TeBkUmLpqFortune()); - } - TeBkUmLpqFortune& w = flst->at(flst->size()-1); - if(cn==0)w.setId(ntohl(*((uint32_t *) d))); - else { - std::string nm = std::string(d, l); - CryptoHandler::sanitizeHtml(nm); - w.setMessage(nm); + flst->emplace_back(ntohl(*((uint32_t *) d))); + } else { + TeBkUmLpqFortune& w = flst->back(); + w.message = CryptoHandler::sanitizeHtmlFast((const uint8_t *)d, (size_t)l, w.message_i, w.allocd); } } - //https://stackoverflow.com/questions/9631225/convert-strings-specified-by-length-not-nul-terminated-to-int-float bool TeBkUmLpqRouter::strToNum(const char* str, int len, int& ret) { ret = 0; @@ -421,76 +445,63 @@ bool TeBkUmLpqRouter::strToNum(const char* str, int len, int& ret) { return true; } -bool TeBkUmLpqRouter::route(HttpRequest* req, HttpResponse* res, void* dlib, void* ddlib, SocketInterface* sif) { - std::string_view path = req->getPath(); - if(StringUtil::endsWith(path, "/plaintext")) { +bool TeBkUmLpqRouter::route(HttpRequest* req, HttpResponse* res, SocketInterface* sif) { + if(StringUtil::endsWith(req->getPath(), "/plaintext")) { res->setContent(HELLO_WORLD); res->setContentType(ContentTypes::CONTENT_TYPE_TEXT_PLAIN); res->setHTTPResponseStatus(HTTPResponseStatus::Ok); - } else if(StringUtil::endsWith(path, "/json")) { + } else if(StringUtil::endsWith(req->getPath(), "/json")) { TeBkUmLpqMessage msg; msg.setMessage(HELLO_WORLD); - JSONSerialize::serializeUnknown(&msg, 0, "TeBkUmLpqMessage", res->getContentP()); + JSONSerialize::serializeObject(&msg, m_ser, res->getContentP()); res->setContentType(ContentTypes::CONTENT_TYPE_APPLICATION_JSON); 
res->setHTTPResponseStatus(HTTPResponseStatus::Ok); - } else if(StringUtil::endsWith(path, "/db")) { + } else if(StringUtil::endsWith(req->getPath(), "/db")) { TeBkUmLpqWorld msg; db(msg); - JSONSerialize::serializeUnknown(&msg, 0, "TeBkUmLpqWorld", res->getContentP()); + JSONSerialize::serializeObject(&msg, w_ser, res->getContentP()); res->setContentType(ContentTypes::CONTENT_TYPE_APPLICATION_JSON); res->setHTTPResponseStatus(HTTPResponseStatus::Ok); - } else if(StringUtil::endsWith(path, "/queries_old")) { + } else if(StringUtil::endsWith(req->getPath(), "/queries_old")) { struct yuarel_param params[1]; yuarel_parse_query((char*)req->getQueryStr().data(), req->getQueryStr().size(), params, 1); std::vector msg; queries(params[0].val, params[0].val_len, msg); - JSONSerialize::serializeUnknown(&msg, 100, "std::vector", res->getContentP()); + JSONSerialize::serializeObjectCont(&msg, wcont_ser, "vector", res->getContentP()); res->setContentType(ContentTypes::CONTENT_TYPE_APPLICATION_JSON); res->setHTTPResponseStatus(HTTPResponseStatus::Ok); - } else if(StringUtil::endsWith(path, "/queries")) { + } else if(StringUtil::endsWith(req->getPath(), "/queries")) { struct yuarel_param params[1]; yuarel_parse_query((char*)req->getQueryStr().data(), req->getQueryStr().size(), params, 1); std::vector msg; queriesMulti(params[0].val, params[0].val_len, msg); - JSONSerialize::serializeUnknown(&msg, 100, "std::vector", res->getContentP()); + JSONSerialize::serializeObjectCont(&msg, wcont_ser, "vector", res->getContentP()); res->setContentType(ContentTypes::CONTENT_TYPE_APPLICATION_JSON); res->setHTTPResponseStatus(HTTPResponseStatus::Ok); - } else if(StringUtil::endsWith(path, "/fortunes")) { - Context ctx; - getContext(req, &ctx); - - void* mkr = dlsym(ddlib, TPE_FN_NAME.c_str()); - if(mkr!=NULL) - { - TeBkUmLpqTemplatePtr f = (TeBkUmLpqTemplatePtr)mkr; - std::string msg; - f(&ctx, msg); - res->setContent(msg); - res->setContentType(ContentTypes::CONTENT_TYPE_TEXT_SHTML); - res->setHTTPResponseStatus(HTTPResponseStatus::Ok); - } - } else if(StringUtil::endsWith(path, "/bupdates")) { + } else if(StringUtil::endsWith(req->getPath(), "/fortunes")) { + handleTemplate(res); + } else if(StringUtil::endsWith(req->getPath(), "/bupdates")) { struct yuarel_param params[1]; yuarel_parse_query((char*)req->getQueryStr().data(), req->getQueryStr().size(), params, 1); std::vector msg; updates(params[0].val, params[0].val_len, msg); - JSONSerialize::serializeUnknown(&msg, 100, "std::vector", res->getContentP()); + JSONSerialize::serializeObjectCont(&msg, wcont_ser, "vector", res->getContentP()); res->setContentType(ContentTypes::CONTENT_TYPE_APPLICATION_JSON); res->setHTTPResponseStatus(HTTPResponseStatus::Ok); - } else if(StringUtil::endsWith(path, "/updates")) { + } else if(StringUtil::endsWith(req->getPath(), "/updates")) { struct yuarel_param params[1]; yuarel_parse_query((char*)req->getQueryStr().data(), req->getQueryStr().size(), params, 1); std::vector msg; updatesMulti(params[0].val, params[0].val_len, msg); - JSONSerialize::serializeUnknown(&msg, 100, "std::vector", res->getContentP()); + JSONSerialize::serializeObjectCont(&msg, wcont_ser, "vector", res->getContentP()); res->setContentType(ContentTypes::CONTENT_TYPE_APPLICATION_JSON); res->setHTTPResponseStatus(HTTPResponseStatus::Ok); - } else if(StringUtil::endsWith(path, "/cached-worlds")) { + } else if(StringUtil::endsWith(req->getPath(), "/cached-worlds")) { struct yuarel_param params[1]; yuarel_parse_query((char*)req->getQueryStr().data(), 
req->getQueryStr().size(), params, 1); std::vector msg; cachedWorlds(params[0].val, params[0].val_len, msg); - JSONSerialize::serializeUnknown(&msg, 100, "std::vector", res->getContentP()); + JSONSerialize::serializeObjectCont(&msg, wcont_ser, "vector", res->getContentP()); res->setContentType(ContentTypes::CONTENT_TYPE_APPLICATION_JSON); res->setHTTPResponseStatus(HTTPResponseStatus::Ok); } else { @@ -500,15 +511,17 @@ bool TeBkUmLpqRouter::route(HttpRequest* req, HttpResponse* res, void* dlib, voi return true; } -std::string TeBkUmLpqRouter::APP_NAME = ""; -std::string TeBkUmLpqRouter::TPE_FN_NAME = ""; +TemplatePtr TeBkUmLpqRouter::tmplFunc; +Ser TeBkUmLpqRouter::m_ser; +Ser TeBkUmLpqRouter::w_ser; +SerCont TeBkUmLpqRouter::wcont_ser; TeBkUmLpqRouter::TeBkUmLpqRouter() { sqli = NULL; - if(APP_NAME=="") { - APP_NAME = CommonUtils::normalizeAppName("te-benchmark-um-pq"); - TPE_FN_NAME = CommonUtils::getTpeFnName("tpe/fortunes.tpe", "te-benchmark-um-pq"); - } + tmplFunc = TemplateUtil::getTemplateFunc("te-benchmark-um-pq", "tpe/fortunes.tpe"); + m_ser = Serializer::getSerFuncForObject("te-benchmark-um-pq", "TeBkUmLpqMessage"); + w_ser = Serializer::getSerFuncForObject("te-benchmark-um-pq", "TeBkUmLpqWorld"); + wcont_ser = Serializer::getSerFuncForObjectCont("te-benchmark-um-pq", "TeBkUmLpqWorld", "std::vector"); } TeBkUmLpqRouter::~TeBkUmLpqRouter() { diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um-pq/src/autotools/Makefile.am b/frameworks/C++/ffead-cpp/te-benchmark-um-pq/src/autotools/Makefile.am index 233d7d9b994..2d4bfcd6e75 100644 --- a/frameworks/C++/ffead-cpp/te-benchmark-um-pq/src/autotools/Makefile.am +++ b/frameworks/C++/ffead-cpp/te-benchmark-um-pq/src/autotools/Makefile.am @@ -8,7 +8,7 @@ distdir=${PACKAGE_NAME}-${PACKAGE_VERSION}-src fprefix=../../../../${packageIdentifier} prefix=${abs_builddir} -lib_LTLIBRARIES = libte_benchmark_um_pq.la +lib_LTLIBRARIES = libte-benchmark-um-pq.la libte_benchmark_um_pq_la_SOURCES = ../TeBkUmLpq.cpp libte_benchmark_um_pq_la_LDFLAGS = -no-undefined diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um-pq/tpe/fortunes.tpe b/frameworks/C++/ffead-cpp/te-benchmark-um-pq/tpe/fortunes.tpe index 8d0e9d9093f..caa6cc8a7c4 100644 --- a/frameworks/C++/ffead-cpp/te-benchmark-um-pq/tpe/fortunes.tpe +++ b/frameworks/C++/ffead-cpp/te-benchmark-um-pq/tpe/fortunes.tpe @@ -1,12 +1,12 @@ -#declare std::vector fortunes# +#declareref std::list* fortunes# Fortunes -#for(int i=0;i<(int)fortunes.size();i++)# - +#for(std::list::iterator it=fortunes->begin(); it != fortunes->end(); ++it)# + #rof#
<tr><th>id</th><th>message</th></tr>
-<tr><td>$_S{fortunes.at(i).getId()}</td><td>${fortunes.at(i).getMessage()}</td></tr>
+<tr><td>${(*it).getId()}</td><td>${(*it).message}</td></tr>
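The template above now iterates a std::list<TeBkUmLpqFortune>* supplied via #declareref instead of indexing a std::vector. Below is a minimal, self-contained sketch of the loop pattern the generated code relies on; the Fortune struct, field names, and sample message are simplified stand-ins for illustration (not the framework's actual TeBkUmLpqFortune type), and the real router additionally HTML-sanitizes each message before rendering.

#include <iostream>
#include <list>
#include <string>
#include <utility>

// Simplified stand-in for the fortune entity used by the template.
struct Fortune {
    int id;
    std::string message;
    Fortune(int i, std::string m) : id(i), message(std::move(m)) {}
    int getId() const { return id; }
    // std::list::sort needs operator<; fortunes are ordered by message text.
    bool operator<(const Fortune& o) const { return message < o.message; }
};

int main() {
    std::list<Fortune> fortunes;
    fortunes.emplace_back(11, "An example fortune message.");
    // Extra row appended at request time, mirroring the router code above.
    fortunes.emplace_back(0, "Additional fortune added at request time.");
    fortunes.sort();

    // Equivalent of the template's #for(...)# loop over the list iterator.
    for (std::list<Fortune>::iterator it = fortunes.begin(); it != fortunes.end(); ++it) {
        std::cout << "<tr><td>" << (*it).getId() << "</td><td>" << (*it).message << "</td></tr>\n";
    }
    return 0;
}

A std::list lets the request-time fortune be appended and the whole collection sorted in place without reallocation, which appears to be the motivation suggested by the switch away from std::vector in the surrounding changes.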
diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um-pq/xmake.lua b/frameworks/C++/ffead-cpp/te-benchmark-um-pq/xmake.lua new file mode 100644 index 00000000000..c755bb731e0 --- /dev/null +++ b/frameworks/C++/ffead-cpp/te-benchmark-um-pq/xmake.lua @@ -0,0 +1,12 @@ +add_includedirs("include/") + +local bindir = "$(projectdir)/ffead-cpp-6.0-bin" + +target("te-benchmark-um-pq") + set_languages("c++17") + add_deps("ffead-framework") + add_options(getOptions()) + set_kind("shared") + on_load(setIncludes) + add_files("src/*.cpp") + set_installdir(bindir) diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um/CMakeLists.txt b/frameworks/C++/ffead-cpp/te-benchmark-um/CMakeLists.txt index 9f335018096..060f045633d 100644 --- a/frameworks/C++/ffead-cpp/te-benchmark-um/CMakeLists.txt +++ b/frameworks/C++/ffead-cpp/te-benchmark-um/CMakeLists.txt @@ -5,6 +5,9 @@ file(GLOB sources ) include_directories("${CMAKE_SOURCE_DIR}/web/te-benchmark-um/include") -add_library(te_benchmark_um SHARED ${sources}) -set_property(TARGET te_benchmark_um PROPERTY POSITION_INDEPENDENT_CODE ON) -target_link_libraries(te_benchmark_um ffead-modules ffead-framework ${HAVE_CURLLIB} ${HAVE_SSLLIB} ${HAVE_MEMCACHEDLIB} ${HAVE_ODBCLIB} ${HAVE_MONGOCLIB} ${HAVE_BSONLIB} ${HAVE_ZLIB} ${HAVE_CRYPTOLIB}) +if(BUILD_STATIC_LIBS OR EMSCRIPTEN) + add_library(te-benchmark-um STATIC ${sources}) +else() + add_library(te-benchmark-um ${sources}) +endif() +target_link_libraries(te-benchmark-um ffead-modules ffead-framework ${HAVE_CURLLIB} ${HAVE_SSLLIB} ${HAVE_MEMCACHEDLIB} ${HAVE_ODBCLIB} ${HAVE_MONGOCLIB} ${HAVE_BSONLIB} ${HAVE_ZLIB} ${HAVE_CRYPTOLIB}) diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um/config/application.xml b/frameworks/C++/ffead-cpp/te-benchmark-um/config/application.xml index ec67cbd3949..d68931cd109 100644 --- a/frameworks/C++/ffead-cpp/te-benchmark-um/config/application.xml +++ b/frameworks/C++/ffead-cpp/te-benchmark-um/config/application.xml @@ -1,4 +1,4 @@ - + * GET, POST, HEAD, PUT, DELETE diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um/config/cache.xml b/frameworks/C++/ffead-cpp/te-benchmark-um/config/cache.xml deleted file mode 100644 index f1697538f2b..00000000000 --- a/frameworks/C++/ffead-cpp/te-benchmark-um/config/cache.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - - - sumeet - sumeet - - - TeBkUmRouter.updateCache - Memory-Cached - memory - - \ No newline at end of file diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um/config/sdorm.xml b/frameworks/C++/ffead-cpp/te-benchmark-um/config/sdorm.xml deleted file mode 100644 index 734c2c5591a..00000000000 --- a/frameworks/C++/ffead-cpp/te-benchmark-um/config/sdorm.xml +++ /dev/null @@ -1,16 +0,0 @@ - - - - - - localhost - 27017 - hello_world - - - 30 - MongoDB-DSN - mongo - - - diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um/include/TeBkUm.h b/frameworks/C++/ffead-cpp/te-benchmark-um/include/TeBkUm.h index 1cdf670dca9..f473785e29e 100644 --- a/frameworks/C++/ffead-cpp/te-benchmark-um/include/TeBkUm.h +++ b/frameworks/C++/ffead-cpp/te-benchmark-um/include/TeBkUm.h @@ -40,8 +40,6 @@ #include "yuarel.h" #include "Router.h" -typedef void (*TeBkUmTemplatePtr) (Context*, std::string&); - #pragma @Entity #pragma @Table name="fortune" class TeBkUmFortune { @@ -71,8 +69,11 @@ class TeBkUmRouter : public Router { static const std::string HELLO_WORLD; static std::string WORLD; - static std::string APP_NAME; - static std::string TPE_FN_NAME; + static TemplatePtr tmplFunc; + + static Ser m_ser; + static Ser w_ser; + static SerCont wcont_ser; bool strToNum(const 
char* str, int len, int& ret); void db(TeBkUmWorld&); @@ -84,7 +85,7 @@ class TeBkUmRouter : public Router { TeBkUmRouter(); virtual ~TeBkUmRouter(); void updateCache(); - bool route(HttpRequest* req, HttpResponse* res, void* dlib, void* ddlib, SocketInterface* sif); + bool route(HttpRequest* req, HttpResponse* res, SocketInterface* sif); }; #endif /* WEB_TE_BENCHMARK_UM_INCLUDE_TeBkUm_H_ */ diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um/meson.build b/frameworks/C++/ffead-cpp/te-benchmark-um/meson.build new file mode 100644 index 00000000000..2d7a697e5cb --- /dev/null +++ b/frameworks/C++/ffead-cpp/te-benchmark-um/meson.build @@ -0,0 +1,15 @@ + +module_includes = ['/usr/local/include','../../src/modules/common','../../src/modules/cache','../../src/modules/cache/memory','../../src/modules/cache/redis', + '../../src/modules/cache/memcached','../../src/modules/client-util','../../src/modules/http','../../src/modules/http/http11','../../src/modules/http/http20', + '../../src/modules/ioc','../../src/modules/jobs','../../src/modules/reflection','../../src/modules/sdorm','../../src/modules/sdorm/sql','../../src/modules/sdorm/sql/libpq', + '../../src/modules/client-util/ssl','../../src/modules/sdorm/mongo','../../src/modules/sdorm/mongo/raw','../../src/modules/search','../../src/modules/serialization', + '../../src/modules/serialization/xml','../../src/modules/serialization/json','../../src/modules/server-util','../../src/modules/ssl','../../src/modules/threads', + '../../src/framework','include'] +module_includes += global_includes +module_libs = [global_libs] + +c = run_command(meson_grabber, 'src/') +module_sources = c.stdout().strip().split('\n') + +shared_library('te-benchmark-um', module_sources, include_directories: module_includes, dependencies: module_libs, link_with: [libffeadmodules, libffeadframework], + install: true, install_dir: bin_dir+'/lib') \ No newline at end of file diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um/src/TeBkUm.cpp b/frameworks/C++/ffead-cpp/te-benchmark-um/src/TeBkUm.cpp index 4f328446132..9c77c6fa03e 100644 --- a/frameworks/C++/ffead-cpp/te-benchmark-um/src/TeBkUm.cpp +++ b/frameworks/C++/ffead-cpp/te-benchmark-um/src/TeBkUm.cpp @@ -153,6 +153,7 @@ void TeBkUmRouter::updateCache() { } DataSourceManager::cleanImpl(sqli); CacheManager::cleanImpl(cchi); + CacheManager::triggerAppInitCompletion(); } catch(const std::exception& e) { DataSourceManager::cleanImpl(sqli); CacheManager::cleanImpl(cchi); @@ -224,106 +225,60 @@ bool TeBkUmRouter::strToNum(const char* str, int len, int& ret) { return true; } -bool TeBkUmRouter::route(HttpRequest* req, HttpResponse* res, void* dlib, void* ddlib, SocketInterface* sif) { - //Timer t; - //t.start(); +bool TeBkUmRouter::route(HttpRequest* req, HttpResponse* res, SocketInterface* sif) { std::string_view path = req->getPath(); if(StringUtil::endsWith(path, "/plaintext")) { - //t.end(); - //CommonUtils::tsContRstLkp += t.timerNanoSeconds(); - //t.start(); res->setContent(HELLO_WORLD); res->setContentType(ContentTypes::CONTENT_TYPE_TEXT_PLAIN); res->setHTTPResponseStatus(HTTPResponseStatus::Ok); - //t.end(); - //CommonUtils::tsContRstSer += t.timerNanoSeconds(); } else if(StringUtil::endsWith(path, "/json")) { - //t.end(); - //CommonUtils::tsContRstLkp += t.timerNanoSeconds(); - //t.start(); TeBkUmMessage msg; msg.setMessage(HELLO_WORLD); - JSONSerialize::serializeUnknown(&msg, 0, "TeBkUmMessage", res->getContentP()); + JSONSerialize::serializeObject(&msg, m_ser, res->getContentP()); 
res->setContentType(ContentTypes::CONTENT_TYPE_APPLICATION_JSON); res->setHTTPResponseStatus(HTTPResponseStatus::Ok); - //t.end(); - //CommonUtils::tsContRstSer += t.timerNanoSeconds(); } else if(StringUtil::endsWith(path, "/db")) { - //t.end(); - //CommonUtils::tsContRstLkp += t.timerNanoSeconds(); - //t.start(); TeBkUmWorld msg; db(msg); - //t.end(); - //CommonUtils::tsContExec += t.timerNanoSeconds(); - //t.start(); - JSONSerialize::serializeUnknown(&msg, 0, "TeBkUmWorld", res->getContentP()); + JSONSerialize::serializeObject(&msg, w_ser, res->getContentP()); res->setContentType(ContentTypes::CONTENT_TYPE_APPLICATION_JSON); res->setHTTPResponseStatus(HTTPResponseStatus::Ok); - //t.end(); - //CommonUtils::tsContRstSer += t.timerNanoSeconds(); } else if(StringUtil::endsWith(path, "/queries")) { - //t.end(); - //CommonUtils::tsContRstLkp += t.timerNanoSeconds(); - //t.start(); struct yuarel_param params[1]; yuarel_parse_query((char*)req->getQueryStr().data(), req->getQueryStr().size(), params, 1); std::vector msg; queries(params[0].val, params[0].val_len, msg); - //t.end(); - //CommonUtils::tsContExec += t.timerNanoSeconds(); - //t.start(); - JSONSerialize::serializeUnknown(&msg, 100, "std::vector", res->getContentP()); + JSONSerialize::serializeObjectCont(&msg, wcont_ser, "vector", res->getContentP()); res->setContentType(ContentTypes::CONTENT_TYPE_APPLICATION_JSON); res->setHTTPResponseStatus(HTTPResponseStatus::Ok); - //t.end(); - //CommonUtils::tsContRstSer += t.timerNanoSeconds(); } else if(StringUtil::endsWith(path, "/fortunes")) { Context ctx; getContext(req, &ctx); - void* mkr = dlsym(ddlib, TPE_FN_NAME.c_str()); - if(mkr!=NULL) + if(tmplFunc!=NULL) { - TeBkUmTemplatePtr f = (TeBkUmTemplatePtr)mkr; - std::string msg; - f(&ctx, msg); - res->setContent(msg); + fcpstream str; + tmplFunc(&ctx, str); + res->setContent(str.str()); res->setContentType(ContentTypes::CONTENT_TYPE_TEXT_SHTML); res->setHTTPResponseStatus(HTTPResponseStatus::Ok); } } else if(StringUtil::endsWith(path, "/updates")) { - //t.end(); - //CommonUtils::tsContRstLkp += t.timerNanoSeconds(); - //t.start(); struct yuarel_param params[1]; yuarel_parse_query((char*)req->getQueryStr().data(), req->getQueryStr().size(), params, 1); std::vector msg; updates(params[0].val, params[0].val_len, msg); - //t.end(); - //CommonUtils::tsContExec += t.timerNanoSeconds(); - //t.start(); - JSONSerialize::serializeUnknown(&msg, 100, "std::vector", res->getContentP()); + JSONSerialize::serializeObjectCont(&msg, wcont_ser, "vector", res->getContentP()); res->setContentType(ContentTypes::CONTENT_TYPE_APPLICATION_JSON); res->setHTTPResponseStatus(HTTPResponseStatus::Ok); - //t.end(); - //CommonUtils::tsContRstSer += t.timerNanoSeconds(); } else if(StringUtil::endsWith(path, "/cached-worlds")) { - //t.end(); - //CommonUtils::tsContRstLkp += t.timerNanoSeconds(); - //t.start(); struct yuarel_param params[1]; yuarel_parse_query((char*)req->getQueryStr().data(), req->getQueryStr().size(), params, 1); std::vector msg; cachedWorlds(params[0].val, params[0].val_len, msg); - //t.end(); - //CommonUtils::tsContExec += t.timerNanoSeconds(); - //t.start(); - JSONSerialize::serializeUnknown(&msg, 100, "std::vector", res->getContentP()); + JSONSerialize::serializeObjectCont(&msg, wcont_ser, "vector", res->getContentP()); res->setContentType(ContentTypes::CONTENT_TYPE_APPLICATION_JSON); res->setHTTPResponseStatus(HTTPResponseStatus::Ok); - //t.end(); - //CommonUtils::tsContRstSer += t.timerNanoSeconds(); } else { 
res->setHTTPResponseStatus(HTTPResponseStatus::NotFound); } @@ -331,14 +286,16 @@ bool TeBkUmRouter::route(HttpRequest* req, HttpResponse* res, void* dlib, void* return true; } -std::string TeBkUmRouter::APP_NAME = ""; -std::string TeBkUmRouter::TPE_FN_NAME = ""; +TemplatePtr TeBkUmRouter::tmplFunc; +Ser TeBkUmRouter::m_ser; +Ser TeBkUmRouter::w_ser; +SerCont TeBkUmRouter::wcont_ser; TeBkUmRouter::TeBkUmRouter() { - if(APP_NAME=="") { - APP_NAME = CommonUtils::normalizeAppName("te-benchmark-um"); - TPE_FN_NAME = CommonUtils::getTpeFnName("tpe/fortunes.tpe", "te-benchmark-um"); - } + tmplFunc = TemplateUtil::getTemplateFunc("te-benchmark-um", "tpe/fortunes.tpe"); + m_ser = Serializer::getSerFuncForObject("te-benchmark-um", "TeBkUmMessage"); + w_ser = Serializer::getSerFuncForObject("te-benchmark-um", "TeBkUmWorld"); + wcont_ser = Serializer::getSerFuncForObjectCont("te-benchmark-um", "TeBkUmWorld", "std::vector"); } TeBkUmRouter::~TeBkUmRouter() { diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um/src/autotools/Makefile.am b/frameworks/C++/ffead-cpp/te-benchmark-um/src/autotools/Makefile.am index 45a155e9ef4..af9f7d38b6a 100644 --- a/frameworks/C++/ffead-cpp/te-benchmark-um/src/autotools/Makefile.am +++ b/frameworks/C++/ffead-cpp/te-benchmark-um/src/autotools/Makefile.am @@ -8,7 +8,7 @@ distdir=${PACKAGE_NAME}-${PACKAGE_VERSION}-src fprefix=../../../../${packageIdentifier} prefix=${abs_builddir} -lib_LTLIBRARIES = libte_benchmark_um.la +lib_LTLIBRARIES = libte-benchmark-um.la libte_benchmark_um_la_SOURCES = ../TeBkUmWorld.cpp \ ../TeBkUm.cpp diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um/tpe/fortunes.tpe b/frameworks/C++/ffead-cpp/te-benchmark-um/tpe/fortunes.tpe index 2caa086e77e..50de7ef062c 100644 --- a/frameworks/C++/ffead-cpp/te-benchmark-um/tpe/fortunes.tpe +++ b/frameworks/C++/ffead-cpp/te-benchmark-um/tpe/fortunes.tpe @@ -6,7 +6,7 @@ #for(int i=0;i<(int)fortunes.size();i++)# - + #rof#
<tr><th>id</th><th>message</th></tr>
-<tr><td>$_S{fortunes.at(i).getId()}</td><td>${fortunes.at(i).getMessage()}</td></tr>
+<tr><td>${fortunes.at(i).getId()}</td><td>${fortunes.at(i).getMessage()}</td></tr>
diff --git a/frameworks/C++/ffead-cpp/te-benchmark-um/xmake.lua b/frameworks/C++/ffead-cpp/te-benchmark-um/xmake.lua new file mode 100644 index 00000000000..c639ec7457b --- /dev/null +++ b/frameworks/C++/ffead-cpp/te-benchmark-um/xmake.lua @@ -0,0 +1,12 @@ +add_includedirs("include/") + +local bindir = "$(projectdir)/ffead-cpp-6.0-bin" + +target("te-benchmark-um") + set_languages("c++17") + add_deps("ffead-framework") + add_options(getOptions()) + set_kind("shared") + on_load(setIncludes) + add_files("src/*.cpp") + set_installdir(bindir) diff --git a/frameworks/C/nginx/nginx.dockerfile b/frameworks/C/nginx/nginx.dockerfile index 24d3c0a50dc..e86d42ff5b6 100644 --- a/frameworks/C/nginx/nginx.dockerfile +++ b/frameworks/C/nginx/nginx.dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:20.04 +FROM ubuntu:21.04 ARG DEBIAN_FRONTEND=noninteractive diff --git a/frameworks/CSharp/aspnetcore-corert/PlatformBenchmarks/PlatformBenchmarks.csproj b/frameworks/CSharp/aspnetcore-corert/PlatformBenchmarks/PlatformBenchmarks.csproj index 9348c95e350..25fee196a4f 100644 --- a/frameworks/CSharp/aspnetcore-corert/PlatformBenchmarks/PlatformBenchmarks.csproj +++ b/frameworks/CSharp/aspnetcore-corert/PlatformBenchmarks/PlatformBenchmarks.csproj @@ -7,12 +7,18 @@ link - false Speed + true + + + true + false + false + false - + diff --git a/frameworks/CSharp/aspnetcore-corert/PlatformBenchmarks/rd.xml b/frameworks/CSharp/aspnetcore-corert/PlatformBenchmarks/rd.xml deleted file mode 100644 index 80ce435ee22..00000000000 --- a/frameworks/CSharp/aspnetcore-corert/PlatformBenchmarks/rd.xml +++ /dev/null @@ -1,43 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/frameworks/CSharp/aspnetcore/Benchmarks/Benchmarks.csproj b/frameworks/CSharp/aspnetcore/Benchmarks/Benchmarks.csproj index 74a985e985c..bb9accc6de6 100644 --- a/frameworks/CSharp/aspnetcore/Benchmarks/Benchmarks.csproj +++ b/frameworks/CSharp/aspnetcore/Benchmarks/Benchmarks.csproj @@ -12,11 +12,11 @@ - - + + - + diff --git a/frameworks/CSharp/aspnetcore/Benchmarks/Data/EfDb.cs b/frameworks/CSharp/aspnetcore/Benchmarks/Data/EfDb.cs index 808bcb9584a..538eae8337a 100644 --- a/frameworks/CSharp/aspnetcore/Benchmarks/Data/EfDb.cs +++ b/frameworks/CSharp/aspnetcore/Benchmarks/Data/EfDb.cs @@ -54,23 +54,19 @@ private static readonly Func> _firstWorld public async Task LoadMultipleUpdatesRows(int count) { var results = new World[count]; - int currentValue, newValue; - - var ids = Enumerable.Range(1, 10000).Select(x => _random.Next(1, 10001)).Distinct().Take(count).ToArray(); - + var usedIds = new HashSet(count); + for (var i = 0; i < count; i++) { - results[i] = await _firstWorldTrackedQuery(_dbContext, ids[i]); - - currentValue = results[i].RandomNumber; - + int id; do { - newValue = _random.Next(1, 10001); - } - while (newValue == currentValue); + id = _random.Next(1, 10001); + } while (!usedIds.Add(id)); + + results[i] = await _firstWorldTrackedQuery(_dbContext, id); - results[i].RandomNumber = newValue; + results[i].RandomNumber = _random.Next(1, 10001); _dbContext.Entry(results[i]).State = EntityState.Modified; } diff --git a/frameworks/CSharp/aspnetcore/Benchmarks/appsettings.postgresql.json b/frameworks/CSharp/aspnetcore/Benchmarks/appsettings.postgresql.json index f773d27c441..b4a36c9d8c4 100644 --- a/frameworks/CSharp/aspnetcore/Benchmarks/appsettings.postgresql.json +++ b/frameworks/CSharp/aspnetcore/Benchmarks/appsettings.postgresql.json @@ -1,4 +1,4 @@ { - "ConnectionString": 
"Server=tfb-database;Database=hello_world;User Id=benchmarkdbuser;Password=benchmarkdbpass;Maximum Pool Size=256;NoResetOnClose=true;Enlist=false;Max Auto Prepare=3", + "ConnectionString": "Server=tfb-database;Database=hello_world;User Id=benchmarkdbuser;Password=benchmarkdbpass;Maximum Pool Size=18;NoResetOnClose=true;Enlist=false;Max Auto Prepare=4;Multiplexing=true;Write Coalescing Delay Us=500;Write Coalescing Buffer Threshold Bytes=1000", "Database": "postgresql" } diff --git a/frameworks/CSharp/aspnetcore/Benchmarks/appsettings.postgresql.updates.json b/frameworks/CSharp/aspnetcore/Benchmarks/appsettings.postgresql.updates.json index f31a0c5cec9..b4a36c9d8c4 100644 --- a/frameworks/CSharp/aspnetcore/Benchmarks/appsettings.postgresql.updates.json +++ b/frameworks/CSharp/aspnetcore/Benchmarks/appsettings.postgresql.updates.json @@ -1,4 +1,4 @@ { - "ConnectionString": "Server=tfb-database;Database=hello_world;User Id=benchmarkdbuser;Password=benchmarkdbpass;Maximum Pool Size=64;NoResetOnClose=true;Enlist=false;Max Auto Prepare=3", + "ConnectionString": "Server=tfb-database;Database=hello_world;User Id=benchmarkdbuser;Password=benchmarkdbpass;Maximum Pool Size=18;NoResetOnClose=true;Enlist=false;Max Auto Prepare=4;Multiplexing=true;Write Coalescing Delay Us=500;Write Coalescing Buffer Threshold Bytes=1000", "Database": "postgresql" } diff --git a/frameworks/CSharp/aspnetcore/PlatformBenchmarks/PlatformBenchmarks.csproj b/frameworks/CSharp/aspnetcore/PlatformBenchmarks/PlatformBenchmarks.csproj index 17239a7914f..9c06974e58b 100644 --- a/frameworks/CSharp/aspnetcore/PlatformBenchmarks/PlatformBenchmarks.csproj +++ b/frameworks/CSharp/aspnetcore/PlatformBenchmarks/PlatformBenchmarks.csproj @@ -15,6 +15,6 @@ - + diff --git a/frameworks/CSharp/aspnetcore/PlatformBenchmarks/appsettings.postgresql.json b/frameworks/CSharp/aspnetcore/PlatformBenchmarks/appsettings.postgresql.json index e6521654583..b4a36c9d8c4 100644 --- a/frameworks/CSharp/aspnetcore/PlatformBenchmarks/appsettings.postgresql.json +++ b/frameworks/CSharp/aspnetcore/PlatformBenchmarks/appsettings.postgresql.json @@ -1,4 +1,4 @@ { - "ConnectionString": "Server=tfb-database;Database=hello_world;User Id=benchmarkdbuser;Password=benchmarkdbpass;Maximum Pool Size=256;NoResetOnClose=true;Enlist=false;Max Auto Prepare=4;Multiplexing=true;Write Coalescing Delay Us=500;Write Coalescing Buffer Threshold Bytes=1000", + "ConnectionString": "Server=tfb-database;Database=hello_world;User Id=benchmarkdbuser;Password=benchmarkdbpass;Maximum Pool Size=18;NoResetOnClose=true;Enlist=false;Max Auto Prepare=4;Multiplexing=true;Write Coalescing Delay Us=500;Write Coalescing Buffer Threshold Bytes=1000", "Database": "postgresql" } diff --git a/frameworks/CSharp/aspnetcore/PlatformBenchmarks/appsettings.postgresql.updates.json b/frameworks/CSharp/aspnetcore/PlatformBenchmarks/appsettings.postgresql.updates.json index 09584f4549b..b4a36c9d8c4 100644 --- a/frameworks/CSharp/aspnetcore/PlatformBenchmarks/appsettings.postgresql.updates.json +++ b/frameworks/CSharp/aspnetcore/PlatformBenchmarks/appsettings.postgresql.updates.json @@ -1,4 +1,4 @@ { - "ConnectionString": "Server=tfb-database;Database=hello_world;User Id=benchmarkdbuser;Password=benchmarkdbpass;Maximum Pool Size=64;NoResetOnClose=true;Enlist=false;Max Auto Prepare=3;Multiplexing=true;Write Coalescing Delay Us=500;Write Coalescing Buffer Threshold Bytes=1000", + "ConnectionString": "Server=tfb-database;Database=hello_world;User Id=benchmarkdbuser;Password=benchmarkdbpass;Maximum Pool 
Size=18;NoResetOnClose=true;Enlist=false;Max Auto Prepare=4;Multiplexing=true;Write Coalescing Delay Us=500;Write Coalescing Buffer Threshold Bytes=1000", "Database": "postgresql" } diff --git a/frameworks/CSharp/aspnetcore/aspcore-mvc-ef-pg-up.dockerfile b/frameworks/CSharp/aspnetcore/aspcore-mvc-ef-pg-up.dockerfile deleted file mode 100644 index ccb8aa01186..00000000000 --- a/frameworks/CSharp/aspnetcore/aspcore-mvc-ef-pg-up.dockerfile +++ /dev/null @@ -1,14 +0,0 @@ -FROM mcr.microsoft.com/dotnet/sdk:5.0 AS build -WORKDIR /app -COPY Benchmarks . -RUN dotnet publish -c Release -o out - -FROM mcr.microsoft.com/dotnet/aspnet:5.0 AS runtime -ENV ASPNETCORE_URLS http://+:8080 -WORKDIR /app -COPY --from=build /app/out ./ -COPY Benchmarks/appsettings.postgresql.updates.json ./appsettings.json - -EXPOSE 8080 - -ENTRYPOINT ["dotnet", "Benchmarks.dll", "scenarios=MvcDbMultiUpdateEf"] \ No newline at end of file diff --git a/frameworks/CSharp/aspnetcore/aspcore-mw-ef-pg-up.dockerfile b/frameworks/CSharp/aspnetcore/aspcore-mw-ef-pg-up.dockerfile deleted file mode 100644 index 592953f3a7b..00000000000 --- a/frameworks/CSharp/aspnetcore/aspcore-mw-ef-pg-up.dockerfile +++ /dev/null @@ -1,14 +0,0 @@ -FROM mcr.microsoft.com/dotnet/sdk:5.0 AS build -WORKDIR /app -COPY Benchmarks . -RUN dotnet publish -c Release -o out - -FROM mcr.microsoft.com/dotnet/aspnet:5.0 AS runtime -ENV ASPNETCORE_URLS http://+:8080 -WORKDIR /app -COPY --from=build /app/out ./ -COPY Benchmarks/appsettings.postgresql.updates.json ./appsettings.json - -EXPOSE 8080 - -ENTRYPOINT ["dotnet", "Benchmarks.dll", "scenarios=DbMultiUpdateEf"] \ No newline at end of file diff --git a/frameworks/CSharp/aspnetcore/benchmark_config.json b/frameworks/CSharp/aspnetcore/benchmark_config.json index d7dcaff7afd..e9d62a568ae 100644 --- a/frameworks/CSharp/aspnetcore/benchmark_config.json +++ b/frameworks/CSharp/aspnetcore/benchmark_config.json @@ -55,7 +55,7 @@ "webserver": "Kestrel", "os": "Linux", "database_os": "Linux", - "display_name": "ASP.NET Core [Platform,Pg]", + "display_name": "ASP.NET Core [Platform, Pg]", "notes": "", "versus": "aspcore-ado-pg-up" }, @@ -153,24 +153,6 @@ "notes": "", "versus": "aspcore-ado-pg" }, - "mw-ef-pg-up": { - "update_url": "/updates/ef?queries=", - "port": 8080, - "approach": "Realistic", - "classification": "Micro", - "database": "Postgres", - "framework": "ASP.NET Core", - "language": "C#", - "orm": "Full", - "platform": ".NET", - "flavor": "CoreCLR", - "webserver": "Kestrel", - "os": "Linux", - "database_os": "Linux", - "display_name": "ASP.NET Core [Middleware, Pg, EF]", - "notes": "", - "versus": "aspcore-ado-pg" - }, "mw-dap-pg": { "db_url": "/db/dapper", "query_url": "/queries/dapper?queries=", @@ -328,24 +310,6 @@ "notes": "", "versus": "aspcore-ado-pg" }, - "mvc-ef-pg-up": { - "update_url": "/mvc/updates/ef?queries=", - "port": 8080, - "approach": "Realistic", - "classification": "Fullstack", - "database": "Postgres", - "framework": "ASP.NET Core", - "language": "C#", - "orm": "Full", - "platform": ".NET", - "flavor": "CoreCLR", - "webserver": "Kestrel", - "os": "Linux", - "database_os": "Linux", - "display_name": "ASP.NET Core [MVC, Pg, EF]", - "notes": "", - "versus": "aspcore-ado-pg" - }, "mvc-dap-pg": { "db_url": "/mvc/db/dapper", "query_url": "/mvc/queries/dapper?queries=", diff --git a/frameworks/CSharp/beetlex/PlatformBenchmarks/HttpServer.cs b/frameworks/CSharp/beetlex/PlatformBenchmarks/HttpServer.cs index cdfa410e9c4..bc7ecc5a134 100644 --- 
a/frameworks/CSharp/beetlex/PlatformBenchmarks/HttpServer.cs +++ b/frameworks/CSharp/beetlex/PlatformBenchmarks/HttpServer.cs @@ -21,15 +21,7 @@ public virtual Task StartAsync(CancellationToken cancellationToken) serverOptions.DefaultListen.Port = 8080; serverOptions.Statistical = false; serverOptions.BufferPoolMaxMemory = 1000; - serverOptions.BufferPoolSize = 1024 * 10; - if (Program.Debug) - { - serverOptions.BufferSize = 1024 * 16; - } - else - { - serverOptions.BufferSize = 1024 * 8; - } + serverOptions.BufferPoolSize = 1024 * 16; ApiServer = SocketFactory.CreateTcpServer(serverOptions); ApiServer.Open(); if (!Program.UpDB) @@ -43,7 +35,7 @@ public virtual Task StartAsync(CancellationToken cancellationToken) RawDb._connectionString = "Server=tfb-database;Database=hello_world;User Id=benchmarkdbuser;Password=benchmarkdbpass;Maximum Pool Size=64;NoResetOnClose=true;Enlist=false;Max Auto Prepare=3;Multiplexing=true;Write Coalescing Delay Us=500;Write Coalescing Buffer Threshold Bytes=1000"; // RawDb._connectionString = "Server=192.168.2.19;Database=hello_world;User Id=benchmarkdbuser;Password=benchmarkdbpass;Maximum Pool Size=64;NoResetOnClose=true;Enlist=false;Max Auto Prepare=3"; } - ApiServer.Log(LogType.Info, null, $"Debug mode [{Program.Debug}]"); + // ApiServer.Log(LogType.Info, null, $"Debug mode [{Program.Debug}]"); return Task.CompletedTask; } diff --git a/frameworks/CSharp/beetlex/PlatformBenchmarks/Program.cs b/frameworks/CSharp/beetlex/PlatformBenchmarks/Program.cs index 37c763e9471..774acc842ee 100644 --- a/frameworks/CSharp/beetlex/PlatformBenchmarks/Program.cs +++ b/frameworks/CSharp/beetlex/PlatformBenchmarks/Program.cs @@ -7,13 +7,13 @@ namespace PlatformBenchmarks class Program { - public static bool Debug = false; + //public static bool Debug = false; public static bool UpDB = false; public static void Main(string[] args) { - Debug = (args != null && args.Length > 0 && args[0] == "debug"); + //Debug = (args != null && args.Length > 0 && args[0] == "debug"); UpDB = (args != null && args.Length > 0 && args[0] == "updb"); new HostBuilder().ConfigureServices(delegate (HostBuilderContext hostContext, IServiceCollection services) { diff --git a/frameworks/CSharp/beetlex/PlatformBenchmarks/db.cs b/frameworks/CSharp/beetlex/PlatformBenchmarks/db.cs index ce3d7e42ca5..237475452da 100644 --- a/frameworks/CSharp/beetlex/PlatformBenchmarks/db.cs +++ b/frameworks/CSharp/beetlex/PlatformBenchmarks/db.cs @@ -16,14 +16,8 @@ public async ValueTask db(PipeStream stream, HttpToken token, ISession session) try { var data = await token.Db.LoadSingleQueryRow(); - if (Program.Debug) - { - await JsonSerializer.NonGeneric.Utf8.SerializeAsync(data, stream); - } - else - { - System.Text.Json.JsonSerializer.Serialize(GetUtf8JsonWriter(stream, token), data, SerializerOptions); - } + + System.Text.Json.JsonSerializer.Serialize(GetUtf8JsonWriter(stream, token), data, SerializerOptions); } catch (Exception e_) { diff --git a/frameworks/CSharp/beetlex/PlatformBenchmarks/fortunes.cs b/frameworks/CSharp/beetlex/PlatformBenchmarks/fortunes.cs index 74de96aed0f..0fda0ba0e5f 100644 --- a/frameworks/CSharp/beetlex/PlatformBenchmarks/fortunes.cs +++ b/frameworks/CSharp/beetlex/PlatformBenchmarks/fortunes.cs @@ -37,35 +37,21 @@ public async Task fortunes(PipeStream stream, HttpToken token, ISession session) { var data = await token.Db.LoadFortunesRows(); - if (Program.Debug) - { - var html = token.GetHtmlBufferWriter(); - html.Reset(); - html.Write(_fortunesTableStart.Data, 0, _fortunesTableStart.Length); - 
foreach (var item in data) - { - html.Write(_fortunesRowStart.Data, 0, _fortunesRowStart.Length); - WriteNumeric(html, (uint)item.Id); - html.Write(_fortunesColumn.Data, 0, _fortunesColumn.Length); - html.Write(HtmlEncoder.Encode(item.Message)); - html.Write(_fortunesRowEnd.Data, 0, _fortunesRowEnd.Length); - } - html.Write(_fortunesTableEnd.Data, 0, _fortunesTableEnd.Length); - stream.Write(html.Data, 0, html.Length); - } - else + + var html = token.GetHtmlBufferWriter(); + html.Reset(); + html.Write(_fortunesTableStart.Data, 0, _fortunesTableStart.Length); + foreach (var item in data) { - stream.Write(_fortunesTableStart.Data, 0, _fortunesTableStart.Length); - foreach (var item in data) - { - stream.Write(_fortunesRowStart.Data, 0, _fortunesRowStart.Length); - WriteNumeric(stream, (uint)item.Id); - stream.Write(_fortunesColumn.Data, 0, _fortunesColumn.Length); - stream.Write(HtmlEncoder.Encode(item.Message)); - stream.Write(_fortunesRowEnd.Data, 0, _fortunesRowEnd.Length); - } - stream.Write(_fortunesTableEnd.Data, 0, _fortunesTableEnd.Length); + html.Write(_fortunesRowStart.Data, 0, _fortunesRowStart.Length); + WriteNumeric(html, (uint)item.Id); + html.Write(_fortunesColumn.Data, 0, _fortunesColumn.Length); + html.Write(HtmlEncoder.Encode(item.Message)); + html.Write(_fortunesRowEnd.Data, 0, _fortunesRowEnd.Length); } + html.Write(_fortunesTableEnd.Data, 0, _fortunesTableEnd.Length); + stream.Write(html.Data, 0, html.Length); + } catch (Exception e_) { diff --git a/frameworks/CSharp/beetlex/PlatformBenchmarks/json.cs b/frameworks/CSharp/beetlex/PlatformBenchmarks/json.cs index eeb42ae48dd..366346e750d 100644 --- a/frameworks/CSharp/beetlex/PlatformBenchmarks/json.cs +++ b/frameworks/CSharp/beetlex/PlatformBenchmarks/json.cs @@ -32,14 +32,7 @@ private static Utf8JsonWriter GetUtf8JsonWriter(PipeStream stream, HttpToken tok public ValueTask Json(PipeStream stream, HttpToken token, ISession session) { - if (Program.Debug) - { - System.Text.Json.JsonSerializer.Serialize(GetUtf8JsonWriter(stream, token), new JsonMessage { message = "Hello, World!" }, SerializerOptions); - } - else - { - SpanJson.JsonSerializer.NonGeneric.Utf8.SerializeAsync(new JsonMessage { message = "Hello, World!" }, stream); - } + System.Text.Json.JsonSerializer.Serialize(GetUtf8JsonWriter(stream, token), new JsonMessage { message = "Hello, World!" }, SerializerOptions); OnCompleted(stream, session, token); return ValueTask.CompletedTask; } diff --git a/frameworks/CSharp/beetlex/beetlex-debug.dockerfile b/frameworks/CSharp/beetlex/beetlex-debug.dockerfile deleted file mode 100644 index 98d247f328f..00000000000 --- a/frameworks/CSharp/beetlex/beetlex-debug.dockerfile +++ /dev/null @@ -1,13 +0,0 @@ -FROM mcr.microsoft.com/dotnet/sdk:5.0-alpine AS build -WORKDIR /app -COPY PlatformBenchmarks . 
-RUN dotnet publish -c Release -o out - -FROM mcr.microsoft.com/dotnet/aspnet:5.0 AS runtime -ENV DOTNET_SYSTEM_NET_SOCKETS_INLINE_COMPLETIONS 1 -WORKDIR /app -COPY --from=build /app/out ./ - -EXPOSE 8080 - -ENTRYPOINT ["dotnet", "PlatformBenchmarks.dll","debug"] diff --git a/frameworks/CSharp/beetlex/benchmark_config.json b/frameworks/CSharp/beetlex/benchmark_config.json index b31ef0c6c80..b5e85bfbf49 100644 --- a/frameworks/CSharp/beetlex/benchmark_config.json +++ b/frameworks/CSharp/beetlex/benchmark_config.json @@ -64,26 +64,7 @@ "webserver": "beetlex", "os": "Linux", "database_os": "Linux", - "display_name": "beetlex-core", - "notes": "", - "versus": "aspcore" - }, - "debug": { - "plaintext_url": "/plaintext", - "json_url": "/json", - "port": 8080, - "approach": "Realistic", - "classification": "Platform", - "database": "Postgres", - "framework": "beetlex", - "language": "C#", - "orm": "Raw", - "platform": ".NET", - "flavor": "CoreCLR", - "webserver": "beetlex", - "os": "Linux", - "database_os": "Linux", - "display_name": "beetlex-core", + "display_name": "beetlex-core-updb", "notes": "", "versus": "aspcore" } diff --git a/frameworks/CSharp/beetlex/config.toml b/frameworks/CSharp/beetlex/config.toml index e5f4c328a06..3e389ec3a67 100644 --- a/frameworks/CSharp/beetlex/config.toml +++ b/frameworks/CSharp/beetlex/config.toml @@ -36,20 +36,6 @@ platform = ".NET" webserver = "beetlex" versus = "aspcore" -[debug] -urls.plaintext = "/plaintext" -urls.json = "/json" -urls.query = "/queries?queries=" -approach = "Realistic" -classification = "Platform" -database = "Postgres" -database_os = "Linux" -os = "Linux" -orm = "Raw" -platform = ".NET" -webserver = "beetlex" -versus = "aspcore" - [core-updb] urls.update = "/updates?queries=" urls.fortune = "/fortunes" diff --git a/frameworks/CSharp/easyrpc/Benchmarks/Benchmarks.csproj b/frameworks/CSharp/easyrpc/Benchmarks/Benchmarks.csproj index 6e7b169100a..33d112263f7 100644 --- a/frameworks/CSharp/easyrpc/Benchmarks/Benchmarks.csproj +++ b/frameworks/CSharp/easyrpc/Benchmarks/Benchmarks.csproj @@ -5,9 +5,9 @@ - - - - + + + + diff --git a/frameworks/CSharp/easyrpc/Benchmarks/appsettings.json b/frameworks/CSharp/easyrpc/Benchmarks/appsettings.json index 002010a0341..bca0e3446ca 100644 --- a/frameworks/CSharp/easyrpc/Benchmarks/appsettings.json +++ b/frameworks/CSharp/easyrpc/Benchmarks/appsettings.json @@ -1,3 +1,3 @@ { - "ConnectionString": "Server=tfb-database;Database=hello_world;User Id=benchmarkdbuser;Password=benchmarkdbpass;Maximum Pool Size=256;NoResetOnClose=true;Enlist=false;Max Auto Prepare=3" + "ConnectionString": "Server=tfb-database;Database=hello_world;User Id=benchmarkdbuser;Password=benchmarkdbpass;Maximum Pool Size=256;NoResetOnClose=true;Enlist=false;Max Auto Prepare=4;Multiplexing=true;Write Coalescing Delay Us=500;Write Coalescing Buffer Threshold Bytes=1000" } diff --git a/frameworks/CSharp/easyrpc/README.md b/frameworks/CSharp/easyrpc/README.md index f38981ecac6..c5ed8548892 100755 --- a/frameworks/CSharp/easyrpc/README.md +++ b/frameworks/CSharp/easyrpc/README.md @@ -5,7 +5,7 @@ This includes tests for plaintext and json serialization. 
**Language** -* C# 7.0 +* C# 7.0 **Platforms** diff --git a/frameworks/Crystal/crystal/crystal-radix.dockerfile b/frameworks/Crystal/crystal/crystal-radix.dockerfile index 79745546f4b..aaef65c87d9 100644 --- a/frameworks/Crystal/crystal/crystal-radix.dockerfile +++ b/frameworks/Crystal/crystal/crystal-radix.dockerfile @@ -1,18 +1,19 @@ -FROM crystallang/crystal:0.26.1 +FROM crystallang/crystal:1.0.0 WORKDIR /crystal +COPY shard.yml shard.yml +COPY shard.lock shard.lock +RUN shards install + COPY views views -COPY run-radix.sh run-radix.sh +COPY run.sh run.sh COPY server_radix.cr server_radix.cr -COPY shard.lock shard.lock -COPY shard.yml shard.yml ENV GC_MARKERS 1 -ENV DATABASE_URL postgres://benchmarkdbuser:benchmarkdbpass@tfb-database:5432/hello_world?initial_pool_size=56&max_pool_size=56&max_idle_pool_size=56 +ENV DATABASE_URL postgres://benchmarkdbuser:benchmarkdbpass@tfb-database:5432/hello_world?initial_pool_size=56&max_idle_pool_size=56 -RUN shards install -RUN crystal build --release --no-debug server_radix.cr -o server_radix.out +RUN crystal build --release --no-debug server_radix.cr -o server.out EXPOSE 8080 -CMD bash run-radix.sh +CMD bash run.sh diff --git a/frameworks/Crystal/crystal/crystal.dockerfile b/frameworks/Crystal/crystal/crystal.dockerfile index 2f81841a544..ab51249b129 100644 --- a/frameworks/Crystal/crystal/crystal.dockerfile +++ b/frameworks/Crystal/crystal/crystal.dockerfile @@ -1,16 +1,17 @@ -FROM crystallang/crystal:0.26.1 +FROM crystallang/crystal:1.0.0 WORKDIR /crystal +COPY shard.yml shard.yml +COPY shard.lock shard.lock +RUN shards install + COPY views views COPY run.sh run.sh COPY server.cr server.cr -COPY shard.lock shard.lock -COPY shard.yml shard.yml ENV GC_MARKERS 1 -ENV DATABASE_URL postgres://benchmarkdbuser:benchmarkdbpass@tfb-database:5432/hello_world?initial_pool_size=56&max_pool_size=56&max_idle_pool_size=56 +ENV DATABASE_URL postgres://benchmarkdbuser:benchmarkdbpass@tfb-database:5432/hello_world?initial_pool_size=56&max_idle_pool_size=56 -RUN shards install RUN crystal build --release --no-debug server.cr -o server.out EXPOSE 8080 diff --git a/frameworks/Crystal/crystal/run-radix.sh b/frameworks/Crystal/crystal/run-radix.sh deleted file mode 100644 index dc800f011a0..00000000000 --- a/frameworks/Crystal/crystal/run-radix.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -for i in $(seq 1 $(nproc --all)); do - ./server_radix.out & -done - -wait diff --git a/frameworks/Crystal/crystal/server.cr b/frameworks/Crystal/crystal/server.cr index 84ee8cced8c..b15db07c6c5 100644 --- a/frameworks/Crystal/crystal/server.cr +++ b/frameworks/Crystal/crystal/server.cr @@ -5,88 +5,73 @@ require "ecr" APPDB = DB.open(ENV["DATABASE_URL"]) ID_MAXIMUM = 10_000 +CONTENT_HTML = "text/html; charset=UTF-8" +CONTENT_JSON = "application/json" +CONTENT_TEXT = "text/plain" server = HTTP::Server.new do |context| response = context.response request = context.request response.headers["Server"] = "Crystal" - response.headers["Date"] = HTTP.format_time(Time.now) + response.headers["Date"] = HTTP.format_time(Time.local) case request.path when "/json" response.status_code = 200 - response.headers["Content-Type"] = "application/json" - {message: "Hello, World!"}.to_json(response) + response.headers["Content-Type"] = CONTENT_JSON + json = {message: "Hello, World!"}.to_json + response.print(json) when "/plaintext" response.status_code = 200 - response.headers["Content-Type"] = "text/plain" + response.headers["Content-Type"] = CONTENT_TEXT response.print "Hello, World!" 
when "/db" response.status_code = 200 - response.headers["Content-Type"] = "application/json" - random_world.to_json(response) + response.headers["Content-Type"] = CONTENT_JSON + json = find_world(rand(1..ID_MAXIMUM)).to_json + response.print(json) when "/queries" response.status_code = 200 - response.headers["Content-Type"] = "application/json" + response.headers["Content-Type"] = CONTENT_JSON - JSON.build(response) do |json| - json.array do - sanitized_query_count(request).times do - random_world.to_json(json) - end - end - end + worlds = (1..sanitized_query_count(request)).map { find_world(rand(1..ID_MAXIMUM)) } + response.print(worlds.to_json) when "/fortunes" response.status_code = 200 - response.headers["Content-Type"] = "text/html; charset=UTF-8" + response.headers["Content-Type"] = CONTENT_HTML + + data = APPDB.query_all("SELECT id, message FROM Fortune", as: {id: Int32, message: String}) - data = fortunes additional_fortune = { id: 0, message: "Additional fortune added at request time.", } data.push(additional_fortune) - data.sort! { |f1, f2| f1[:message] <=> f2[:message] } + data.sort_by! { |fortune| fortune[:message] } ECR.embed "views/fortunes.ecr", response when "/updates" response.status_code = 200 - response.headers["Content-Type"] = "application/json" - - JSON.build(response) do |json| - json.array do - sanitized_query_count(request).times do - world = set_world({id: random_world[:id], randomNumber: rand(1..ID_MAXIMUM)}) - world.to_json(json) - end + response.headers["Content-Type"] = CONTENT_JSON + worlds = (1..sanitized_query_count(request)).map do + world = find_world(rand(1..ID_MAXIMUM)) + random_number = rand(1..ID_MAXIMUM) + while random_number == world[:randomNumber] + random_number = rand(1..ID_MAXIMUM) end + APPDB.exec("UPDATE world SET randomNumber = $1 WHERE id = $2", random_number, world[:id]) + {id: world[:id], randomNumber: random_number} end + response.print(worlds.to_json) else response.status_code = 404 end end -private def random_world - id = rand(1..ID_MAXIMUM) - random_number = APPDB.query_one("SELECT id, randomNumber FROM world WHERE id = $1", id, as: Int32) - {id: id, randomNumber: random_number} -end - -private def set_world(world) - APPDB.exec("UPDATE world SET randomNumber = $1 WHERE id = $2", world[:randomNumber], world[:id]) - world -end - -private def fortunes - data = Array(NamedTuple(id: Int32, message: String)).new - - APPDB.query_each("SELECT id, message FROM Fortune") do |rs| - data.push({id: rs.read(Int32), message: rs.read(String)}) - end - - data +private def find_world(id : Int32) + APPDB.query_one("SELECT id, randomNumber FROM world WHERE id = $1", id, as: {id: Int32, randomNumber: Int32}) end private def sanitized_query_count(request) diff --git a/frameworks/Crystal/crystal/server_radix.cr b/frameworks/Crystal/crystal/server_radix.cr index 1d5f8ef47c8..ce67edc6c6b 100644 --- a/frameworks/Crystal/crystal/server_radix.cr +++ b/frameworks/Crystal/crystal/server_radix.cr @@ -4,81 +4,84 @@ require "pg" require "ecr" require "radix" +APPDB = DB.open(ENV["DATABASE_URL"]) +ID_MAXIMUM = 10_000 +CONTENT_HTML = "text/html; charset=UTF-8" +CONTENT_JSON = "application/json" +CONTENT_TEXT = "text/plain" + plaintext_handler = ->(context : HTTP::Server::Context) do - response = context.response - response.status_code = 200 - response.headers["Content-Type"] = "text/plain" - response.print "Hello, World!" 
- return response + context.response.tap do |response| + response.status_code = 200 + response.headers["Content-Type"] = CONTENT_TEXT + response.print "Hello, World!" + end end json_handler = ->(context : HTTP::Server::Context) do - response = context.response - response.status_code = 200 - response.headers["Content-Type"] = "application/json" - {message: "Hello, World!"}.to_json(response) - return response + context.response.tap do |response| + response.status_code = 200 + response.headers["Content-Type"] = CONTENT_JSON + json = {message: "Hello, World!"}.to_json + response.print(json) + end end db_handler = ->(context : HTTP::Server::Context) do - request = context.request - response = context.response - response.status_code = 200 - response.headers["Content-Type"] = "application/json" - random_world.to_json(response) - return response + context.response.tap do |response| + response.status_code = 200 + response.headers["Content-Type"] = CONTENT_JSON + json = find_world(rand(1..ID_MAXIMUM)).to_json + response.print(json) + end end queries_handler = ->(context : HTTP::Server::Context) do request = context.request - response = context.response - response.status_code = 200 - response.headers["Content-Type"] = "application/json" + context.response.tap do |response| + response.status_code = 200 + response.headers["Content-Type"] = CONTENT_JSON - JSON.build(response) do |json| - json.array do - sanitized_query_count(request).times do - random_world.to_json(json) - end - end + worlds = (1..sanitized_query_count(request)).map { find_world(rand(1..ID_MAXIMUM)) } + response.print(worlds.to_json) end - return response end fortunes_handler = ->(context : HTTP::Server::Context) do - request = context.request - response = context.response - response.status_code = 200 - response.headers["Content-Type"] = "text/html; charset=UTF-8" + context.response.tap do |response| + response.status_code = 200 + response.headers["Content-Type"] = CONTENT_HTML - data = fortunes - additional_fortune = { - id: 0, - message: "Additional fortune added at request time.", - } + data = APPDB.query_all("SELECT id, message FROM Fortune", as: {id: Int32, message: String}) + additional_fortune = { + id: 0, + message: "Additional fortune added at request time.", + } - data.push(additional_fortune) - data.sort! { |f1, f2| f1[:message] <=> f2[:message] } + data.push(additional_fortune) + data.sort_by! 
{ |fortune| fortune[:message] } - ECR.embed "views/fortunes.ecr", response - return response + ECR.embed "views/fortunes.ecr", response + end end updates_handler = ->(context : HTTP::Server::Context) do request = context.request - response = context.response - response.status_code = 200 - response.headers["Content-Type"] = "application/json" - - JSON.build(response) do |json| - json.array do - sanitized_query_count(request).times do - world = set_world({id: random_world[:id], randomNumber: rand(1..ID_MAXIMUM)}) - world.to_json(json) + context.response.tap do |response| + response.status_code = 200 + response.headers["Content-Type"] = "application/json" + + worlds = (1..sanitized_query_count(request)).map do + world = find_world(rand(1..ID_MAXIMUM)) + random_number = rand(1..ID_MAXIMUM) + while random_number == world[:randomNumber] + random_number = rand(1..ID_MAXIMUM) end + APPDB.exec("UPDATE world SET randomNumber = $1 WHERE id = $2", random_number, world[:id]) + {id: world[:id], randomNumber: random_number} end + response.print(worlds.to_json) end - return response end tree = Radix::Tree(Proc(HTTP::Server::Context, HTTP::Server::Response)).new @@ -89,14 +92,11 @@ tree.add "/queries", queries_handler tree.add "/fortunes", fortunes_handler tree.add "/updates", updates_handler -APPDB = DB.open(ENV["DATABASE_URL"]) -ID_MAXIMUM = 10_000 - server = HTTP::Server.new do |context| request = context.request response = context.response response.headers["Server"] = "Crystal" - response.headers["Date"] = HTTP.format_time(Time.now) + response.headers["Date"] = HTTP.format_time(Time.local) result = tree.find(request.path) @@ -107,25 +107,8 @@ server = HTTP::Server.new do |context| end end -private def random_world - id = rand(1..ID_MAXIMUM) - random_number = APPDB.query_one("SELECT id, randomNumber FROM world WHERE id = $1", id, as: Int32) - {id: id, randomNumber: random_number} -end - -private def set_world(world) - APPDB.exec("UPDATE world SET randomNumber = $1 WHERE id = $2", world[:randomNumber], world[:id]) - world -end - -private def fortunes - data = Array(NamedTuple(id: Int32, message: String)).new - - APPDB.query_each("SELECT id, message FROM Fortune") do |rs| - data.push({id: rs.read(Int32), message: rs.read(String)}) - end - - data +private def find_world(id : Int32) + APPDB.query_one("SELECT id, randomNumber FROM world WHERE id = $1", id, as: {id: Int32, randomNumber: Int32}) end private def sanitized_query_count(request) diff --git a/frameworks/Crystal/crystal/shard.lock b/frameworks/Crystal/crystal/shard.lock index 0884fd9dddb..9e03fdebaf7 100644 --- a/frameworks/Crystal/crystal/shard.lock +++ b/frameworks/Crystal/crystal/shard.lock @@ -1,14 +1,14 @@ -version: 1.0 +version: 2.0 shards: db: - github: crystal-lang/crystal-db - version: 0.5.0 + git: https://github.com/crystal-lang/crystal-db.git + version: 0.10.1 pg: - github: will/crystal-pg - version: 0.15.0 + git: https://github.com/will/crystal-pg.git + version: 0.23.2 radix: - github: luislavena/radix - version: 0.3.8 + git: https://github.com/luislavena/radix.git + version: 0.4.1 diff --git a/frameworks/Crystal/crystal/shard.yml b/frameworks/Crystal/crystal/shard.yml index 5c886c66b81..6bea5905b84 100644 --- a/frameworks/Crystal/crystal/shard.yml +++ b/frameworks/Crystal/crystal/shard.yml @@ -2,12 +2,12 @@ name: crystal version: 0.0.1 license: MIT +crystal: "~> 1.0" dependencies: pg: github: will/crystal-pg - version: 0.15.0 - + version: ~> 0.23.2 radix: github: luislavena/radix - version: 0.3.8 + version: ~> 0.4.1 diff --git 
a/frameworks/Crystal/grip/grip.cr b/frameworks/Crystal/grip/grip.cr index 2186220c4cf..99f3cc60699 100644 --- a/frameworks/Crystal/grip/grip.cr +++ b/frameworks/Crystal/grip/grip.cr @@ -120,7 +120,7 @@ class Fortunes < Grip::Controllers::Http end class Application < Grip::Application - def initialize + def routes get "/json", Json get "/plaintext", Plaintext get "/db", Db @@ -128,13 +128,29 @@ class Application < Grip::Application get "/updates", Updates get "/fortunes", Fortunes end -end -app = Application.new + def router : Array(HTTP::Handler) + [ + @http_handler, + ] of HTTP::Handler + end -Grip.config.logging = false + def server : HTTP::Server + HTTP::Server.new(@router) + end + + def reuse_port + true + end -app.run(8080) do |config| - server = config.server.not_nil! - server.bind_tcp "0.0.0.0", 8080, reuse_port: true + def host + "0.0.0.0" + end + + def port + 8080 + end end + +app = Application.new +app.run diff --git a/frameworks/Crystal/grip/grip.dockerfile b/frameworks/Crystal/grip/grip.dockerfile index 1830f24bdd3..244aa5dbf70 100644 --- a/frameworks/Crystal/grip/grip.dockerfile +++ b/frameworks/Crystal/grip/grip.dockerfile @@ -1,4 +1,4 @@ -FROM crystallang/crystal:0.34.0 +FROM crystallang/crystal:1.0.0 WORKDIR /grip COPY views views diff --git a/frameworks/Crystal/grip/run.sh b/frameworks/Crystal/grip/run.sh old mode 100644 new mode 100755 index fcccbd1eb10..bcf63c4c9d0 --- a/frameworks/Crystal/grip/run.sh +++ b/frameworks/Crystal/grip/run.sh @@ -1,7 +1,7 @@ #!/bin/bash for i in $(seq 1 $(nproc --all)); do - ./grip -p 8080 & + ./grip & done wait diff --git a/frameworks/Crystal/grip/shard.yml b/frameworks/Crystal/grip/shard.yml index 31d6b106a69..a1be1fda0fb 100644 --- a/frameworks/Crystal/grip/shard.yml +++ b/frameworks/Crystal/grip/shard.yml @@ -1,17 +1,12 @@ -name: hoge -version: 0.1.0 +name: grip +version: 0.1.1 dependencies: grip: github: grip-framework/grip - jwt: - github: crystal-community/jwt - version: 1.4.1 - pg: github: will/crystal-pg - version: 0.20.0 targets: grip: diff --git a/frameworks/Dart/dart/README.md b/frameworks/Dart/dart/README.md index 21e73819c80..78147e92537 100644 --- a/frameworks/Dart/dart/README.md +++ b/frameworks/Dart/dart/README.md @@ -4,12 +4,12 @@ This is the dart portion of a [benchmarking test suite](../) comparing a variety ## Versions -* [Dart SDK version >=1.6.0](http://www.dartlang.org/) -* [Dart args version 0.12.0+2](http://pub.dartlang.org/packages/args) -* [Dart crypto version 0.9.0](http://pub.dartlang.org/packages/crypto) -* [Dart mustache version 0.1.8](http://pub.dartlang.org/packages/mustache) -* [Dart postgresql version 0.2.14](http://pub.dartlang.org/packages/postgresql) -* [Dart yaml version 2.0.1+1](http://pub.dartlang.org/packages/yaml) +* [Dart SDK version >=1.6.0](https://dart.dev/) +* [Dart args version 0.12.0+2](https://pub.dev/packages/args) +* [Dart crypto version 0.9.0](https://pub.dev/packages/crypto) +* [Dart mustache version 0.1.8](https://pub.dev/packages/mustache) +* [Dart postgresql version 0.2.14](https://pub.dev/packages/postgresql) +* [Dart yaml version 2.0.1+1](https://pub.dev/packages/yaml) ## Test URLs diff --git a/frameworks/Dart/dart2/.gitignore b/frameworks/Dart/dart2/.gitignore new file mode 100644 index 00000000000..e3d382c1bc8 --- /dev/null +++ b/frameworks/Dart/dart2/.gitignore @@ -0,0 +1,3 @@ +.packages +.dart_tool +pubspec.lock diff --git a/frameworks/Dart/dart2/README.md b/frameworks/Dart/dart2/README.md new file mode 100644 index 00000000000..d5c281055e0 --- /dev/null +++ 
b/frameworks/Dart/dart2/README.md @@ -0,0 +1,22 @@ +# Dart 2 Benchmarking Test + +### Test Type Implementation Source Code + +- [JSON](server.dart) +- [PLAINTEXT](server.dart) + +## Important Libraries + +The tests were run with: + +- [Dart v2.12](https://dart.dev/) + +## Test URLs + +### JSON + +http://localhost:8080/json + +### PLAINTEXT + +http://localhost:8080/plaintext diff --git a/frameworks/Dart/dart2/analysis_options.yaml b/frameworks/Dart/dart2/analysis_options.yaml new file mode 100644 index 00000000000..81c61e2a2ef --- /dev/null +++ b/frameworks/Dart/dart2/analysis_options.yaml @@ -0,0 +1,28 @@ +include: package:pedantic/analysis_options.yaml +analyzer: + strong-mode: + implicit-casts: false +linter: + rules: + - avoid_dynamic_calls + - avoid_unused_constructor_parameters + - await_only_futures + - camel_case_types + - cancel_subscriptions + - constant_identifier_names + - control_flow_in_finally + - directives_ordering + - empty_statements + - hash_and_equals + - implementation_imports + - iterable_contains_unrelated_type + - list_remove_unrelated_type + - non_constant_identifier_names + - overridden_fields + - package_api_docs + - package_names + - package_prefixed_library_names + - prefer_expression_function_bodies + - test_types_in_equals + - throw_in_finally + - unnecessary_brace_in_string_interps diff --git a/frameworks/Dart/dart2/benchmark_config.json b/frameworks/Dart/dart2/benchmark_config.json new file mode 100644 index 00000000000..5c5c4d2e528 --- /dev/null +++ b/frameworks/Dart/dart2/benchmark_config.json @@ -0,0 +1,26 @@ +{ + "framework": "dart2", + "tests": [ + { + "default": { + "json_url": "/json", + "plaintext_url": "/plaintext", + "port": 8080, + "approach": "Stripped", + "classification": "Platform", + "database": "None", + "framework": "None", + "language": "Dart", + "flavor": "None", + "orm": "None", + "platform": "None", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "Dart2", + "notes": "", + "versus": "None" + } + } + ] +} diff --git a/frameworks/Dart/dart2/dart2.dockerfile b/frameworks/Dart/dart2/dart2.dockerfile new file mode 100644 index 00000000000..bb89fefd48f --- /dev/null +++ b/frameworks/Dart/dart2/dart2.dockerfile @@ -0,0 +1,11 @@ +FROM google/dart:2.12 + +WORKDIR /dart_app +COPY pubspec.yaml pubspec.yaml +COPY server.dart server.dart + +RUN pub upgrade + +EXPOSE 8080 + +CMD ["dart", "server.dart"] diff --git a/frameworks/Dart/dart2/pubspec.yaml b/frameworks/Dart/dart2/pubspec.yaml new file mode 100644 index 00000000000..d5206676a25 --- /dev/null +++ b/frameworks/Dart/dart2/pubspec.yaml @@ -0,0 +1,7 @@ +name: dartbenchmark +description: A benchmark of dart +environment: + sdk: '>=2.12.0 <3.0.0' + +dev_dependencies: + pedantic: ^1.0.0 diff --git a/frameworks/Dart/dart2/server.dart b/frameworks/Dart/dart2/server.dart new file mode 100644 index 00000000000..94c2cf80bb7 --- /dev/null +++ b/frameworks/Dart/dart2/server.dart @@ -0,0 +1,88 @@ +import 'dart:async' show Future; +import 'dart:convert'; +import 'dart:io'; +import 'dart:isolate'; + +final _procNumber = Platform.numberOfProcessors; + +final _encoder = JsonUtf8Encoder(); + +void main(List args) { + var errorPort = ReceivePort(); + errorPort.listen((e) => print(e)); + for (var i = 1; i < _procNumber; i++) { + Isolate.spawn(_startInIsolate, [], onError: errorPort.sendPort); + } + _startInIsolate([]); +} + +void _startInIsolate(List args) { + _startServer(); +} + +Future _startServer() async { + final server = await HttpServer.bind('0.0.0.0', 8080, shared: 
true); + server.defaultResponseHeaders.clear(); + server.serverHeader = 'dart'; + server.listen((request) { + switch (request.uri.path) { + case '/json': + _jsonTest(request); + break; + case '/plaintext': + _plaintextTest(request); + break; + default: + _sendResponse(request, HttpStatus.notFound); + break; + } + }); +} + +/// Completes the given [request] by writing the [bytes] with the given +/// [statusCode] and [type]. +void _sendResponse( + HttpRequest request, + int statusCode, { + ContentType? type, + List? bytes, +}) { + final response = request.response; + response + ..statusCode = statusCode + ..headers.date = DateTime.now(); + if (type != null) { + response.headers.contentType = type; + } + if (bytes != null) { + response + ..contentLength = bytes.length + ..add(bytes); + } else { + response.contentLength = 0; + } + response.close(); +} + +/// Completes the given [request] by writing the [response] as JSON. +void _sendJson(HttpRequest request, Object response) => _sendResponse( + request, + HttpStatus.ok, + type: ContentType.json, + bytes: _encoder.convert(response), + ); + +/// Completes the given [request] by writing the [response] as plain text. +void _sendText(HttpRequest request, String response) => _sendResponse( + request, + HttpStatus.ok, + type: ContentType.text, + bytes: utf8.encode(response), + ); + +/// Responds with the JSON test to the [request]. +void _jsonTest(HttpRequest request) => + _sendJson(request, const {'message': 'Hello, World!'}); + +/// Responds with the plaintext test to the [request]. +void _plaintextTest(HttpRequest request) => _sendText(request, 'Hello, World!'); diff --git a/frameworks/Dart/dia/README.md b/frameworks/Dart/dia/README.md new file mode 100755 index 00000000000..9840bc7339b --- /dev/null +++ b/frameworks/Dart/dia/README.md @@ -0,0 +1,20 @@ +# Dia Benchmarking Test + +### Test Type Implementation Source Code + +* [JSON](src/server.dart) +* [PLAINTEXT](src/server.dart) + +## Important Libraries +The tests were run with: +* [Dart >=2.12.0 < 3.0.0](https://dart.dev) +* [Dia ^0.0.7](https://pub.dev/packages/dia) + +## Test URLs +### JSON + +http://localhost:8080/json + +### PLAINTEXT + +http://localhost:8080/plaintext diff --git a/frameworks/Dart/dia/benchmark_config.json b/frameworks/Dart/dia/benchmark_config.json new file mode 100755 index 00000000000..c8080800dbd --- /dev/null +++ b/frameworks/Dart/dia/benchmark_config.json @@ -0,0 +1,26 @@ +{ + "framework": "dia", + "tests": [ + { + "default": { + "json_url": "/json", + "plaintext_url": "/plaintext", + "port": 8080, + "approach": "Realistic", + "classification": "Micro", + "database": "None", + "framework": "Dia", + "language": "Dart", + "flavor": "None", + "orm": "None", + "platform": "None", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "Dia", + "notes": "", + "versus": "None" + } + } + ] +} diff --git a/frameworks/Dart/dia/dia.dockerfile b/frameworks/Dart/dia/dia.dockerfile new file mode 100644 index 00000000000..2a33936bae5 --- /dev/null +++ b/frameworks/Dart/dia/dia.dockerfile @@ -0,0 +1,13 @@ +FROM google/dart:2.12 + +#ADD ./ /src +WORKDIR /src + +COPY src/pubspec.yaml pubspec.yaml +COPY src/server.dart server.dart + +RUN pub upgrade + +EXPOSE 8080 + +CMD ["dart", "server.dart"] diff --git a/frameworks/Dart/dia/src/pubspec.yaml b/frameworks/Dart/dia/src/pubspec.yaml new file mode 100644 index 00000000000..528862f6816 --- /dev/null +++ b/frameworks/Dart/dia/src/pubspec.yaml @@ -0,0 +1,7 @@ +name: DiaBenchmark +description: A benchmark of Dart 
Dia, a http simple framework +environment: + sdk: ">=2.12.0 <3.0.0" +dependencies: + dia: ^0.0.7 + dia_router: ^0.0.5 diff --git a/frameworks/Dart/dia/src/server.dart b/frameworks/Dart/dia/src/server.dart new file mode 100644 index 00000000000..51593d09fec --- /dev/null +++ b/frameworks/Dart/dia/src/server.dart @@ -0,0 +1,36 @@ +import 'dart:io'; + +import 'package:dia/dia.dart'; +import 'package:dia_router/dia_router.dart'; + +class CustomContext extends Context with Routing { + CustomContext(HttpRequest request) : super(request); +} + +void main() { + final app = App(); + + final router = Router('/'); + + app.use((ctx, next) async { + ctx.set('Server', 'Dia'); + ctx.set('Date', HttpDate.format(DateTime.now()).toString()); + await next(); + }); + + router.get('/plaintext', (ctx, next) async { + ctx.body = 'Hello, World!'; + }); + + router.get('/json', (ctx, next) async { + ctx.contentType = ContentType.json; + ctx.body = '{"message":"Hello, World!"}'; + }); + + app.use(router.middleware); + + /// Start server listen on localhsot:8080 + app + .listen('0.0.0.0', 8080) + .then((info) => print('Server started on http://0.0.0.0:8080')); +} diff --git a/frameworks/Dart/start/README.md b/frameworks/Dart/start/README.md index 390e0cd2bb1..0ff26d2b79b 100644 --- a/frameworks/Dart/start/README.md +++ b/frameworks/Dart/start/README.md @@ -8,13 +8,13 @@ This test adds [Start](https://github.com/lvivski/start), a Sinatra inspired web ## Versions * [Dart SDK version >=1.3.0](https://launchpad.net/~hachre/+archive/dart) -* [Dart args version 0.11.0+1](http://pub.dartlang.org/packages/args) -* [Dart crypto version 0.9.0](http://pub.dartlang.org/packages/crypto) -* [Dart mustache version 0.1.8](http://pub.dartlang.org/packages/mustache) -* [Dart mongo_dart version 0.1.39](http://pub.dartlang.org/packages/mongo_dart) -* [Dart postgresql version 0.2.13](http://pub.dartlang.org/packages/postgresql) -* [Dart start version 0.2.4](http://pub.dartlang.org/packages/start) -* [Dart yaml version 0.9.0](http://pub.dartlang.org/packages/yaml) +* [Dart args version 0.11.0+1](https://pub.dev/packages/args) +* [Dart crypto version 0.9.0](https://pub.dev/packages/crypto) +* [Dart mustache version 0.1.8](https://pub.dev/packages/mustache) +* [Dart mongo_dart version 0.1.39](https://pub.dev/packages/mongo_dart) +* [Dart postgresql version 0.2.13](https://pub.dev/packages/postgresql) +* [Dart start version 0.2.4](https://pub.dev/packages/start) +* [Dart yaml version 0.9.0](https://pub.dev/packages/yaml) ## Test URLs diff --git a/frameworks/Dart/stream/README.md b/frameworks/Dart/stream/README.md index 8b04f694293..07aafcd28df 100644 --- a/frameworks/Dart/stream/README.md +++ b/frameworks/Dart/stream/README.md @@ -8,12 +8,12 @@ This test adds [Stream](https://github.com/rikulo/stream), a lightweight Dart we ## Versions * [Dart SDK version >=1.3](https://launchpad.net/~hachre/+archive/dart) -* [Dart args version 0.11.0+1](http://pub.dartlang.org/packages/args) -* [Dart crypto version 0.9.0](http://pub.dartlang.org/packages/crypto) -* [Dart mongo_dart version 0.1.39](http://pub.dartlang.org/packages/mongo_dart) -* [Dart postgresql version 0.2.13](http://pub.dartlang.org/packages/postgresql) -* [Dart stream version 1.2.0](http://pub.dartlang.org/packages/start) -* [Dart yaml version 0.9.0](http://pub.dartlang.org/packages/yaml) +* [Dart args version 0.11.0+1](https://pub.dev/packages/args) +* [Dart crypto version 0.9.0](https://pub.dev/packages/crypto) +* [Dart mongo_dart version 0.1.39](https://pub.dev/packages/mongo_dart) +* 
[Dart postgresql version 0.2.13](https://pub.dev/packages/postgresql) +* [Dart stream version 1.2.0](https://pub.dev/packages/start) +* [Dart yaml version 0.9.0](https://pub.dev/packages/yaml) ## Test URLs diff --git a/frameworks/Elixir/phoenix/mix.exs b/frameworks/Elixir/phoenix/mix.exs index 99b036d9c37..c47a0c3a2ee 100755 --- a/frameworks/Elixir/phoenix/mix.exs +++ b/frameworks/Elixir/phoenix/mix.exs @@ -27,7 +27,7 @@ defmodule Hello.Mixfile do # Type `mix help deps` for examples and options defp deps do [ - {:phoenix, "~> 1.5.6"}, + {:phoenix, "~> 1.5.9"}, {:phoenix_ecto, "~> 4.2"}, {:ecto_sql, "~> 3.5"}, {:postgrex, "~> 0.15"}, diff --git a/frameworks/Elixir/phoenix/mix.lock b/frameworks/Elixir/phoenix/mix.lock index 403575b84b2..bfb7a883460 100644 --- a/frameworks/Elixir/phoenix/mix.lock +++ b/frameworks/Elixir/phoenix/mix.lock @@ -1,24 +1,24 @@ %{ "connection": {:hex, :connection, "1.1.0", "ff2a49c4b75b6fb3e674bfc5536451607270aac754ffd1bdfe175abe4a6d7a68", [:mix], [], "hexpm", "722c1eb0a418fbe91ba7bd59a47e28008a189d47e37e0e7bb85585a016b2869c"}, - "cowboy": {:hex, :cowboy, "2.8.0", "f3dc62e35797ecd9ac1b50db74611193c29815401e53bac9a5c0577bd7bc667d", [:rebar3], [{:cowlib, "~> 2.9.1", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "~> 1.7.1", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "4643e4fba74ac96d4d152c75803de6fad0b3fa5df354c71afdd6cbeeb15fac8a"}, + "cowboy": {:hex, :cowboy, "2.9.0", "865dd8b6607e14cf03282e10e934023a1bd8be6f6bacf921a7e2a96d800cd452", [:make, :rebar3], [{:cowlib, "2.11.0", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "2c729f934b4e1aa149aff882f57c6372c15399a20d54f65c8d67bef583021bde"}, "cowboy_telemetry": {:hex, :cowboy_telemetry, "0.3.1", "ebd1a1d7aff97f27c66654e78ece187abdc646992714164380d8a041eda16754", [:rebar3], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "3a6efd3366130eab84ca372cbd4a7d3c3a97bdfcfb4911233b035d117063f0af"}, - "cowlib": {:hex, :cowlib, "2.9.1", "61a6c7c50cf07fdd24b2f45b89500bb93b6686579b069a89f88cb211e1125c78", [:rebar3], [], "hexpm", "e4175dc240a70d996156160891e1c62238ede1729e45740bdd38064dad476170"}, - "db_connection": {:hex, :db_connection, "2.3.1", "4c9f3ed1ef37471cbdd2762d6655be11e38193904d9c5c1c9389f1b891a3088e", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}], "hexpm", "abaab61780dde30301d840417890bd9f74131041afd02174cf4e10635b3a63f5"}, + "cowlib": {:hex, :cowlib, "2.11.0", "0b9ff9c346629256c42ebe1eeb769a83c6cb771a6ee5960bd110ab0b9b872063", [:make, :rebar3], [], "hexpm", "2b3e9da0b21c4565751a6d4901c20d1b4cc25cbb7fd50d91d2ab6dd287bc86a9"}, + "db_connection": {:hex, :db_connection, "2.4.0", "d04b1b73795dae60cead94189f1b8a51cc9e1f911c234cc23074017c43c031e5", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "ad416c21ad9f61b3103d254a71b63696ecadb6a917b36f563921e0de00d7d7c8"}, "decimal": {:hex, :decimal, "2.0.0", "a78296e617b0f5dd4c6caf57c714431347912ffb1d0842e998e9792b5642d697", [:mix], [], "hexpm", "34666e9c55dea81013e77d9d87370fe6cb6291d1ef32f46a1600230b1d44f577"}, - "ecto": {:hex, :ecto, "3.5.7", "f440a476bf1be361173a43a4a18f04a2fdf4e6fac5b0457f03d8686e55f13f7e", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, 
"~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "04c4e69d4f1cc2bb085aa760d50389ba8ae3003f80c112fbde87d57f5ed75d39"}, - "ecto_sql": {:hex, :ecto_sql, "3.5.4", "a9e292c40bd79fff88885f95f1ecd7b2516e09aa99c7dd0201aa84c54d2358e4", [:mix], [{:db_connection, "~> 2.2", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.5.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.3.0 or ~> 0.4.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.15.0 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "1fff1a28a898d7bbef263f1f3ea425b04ba9f33816d843238c84eff883347343"}, + "ecto": {:hex, :ecto, "3.6.1", "7bb317e3fd0179ad725069fd0fe8a28ebe48fec6282e964ea502e4deccb0bd0f", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "cbb3294a990447b19f0725488a749f8cf806374e0d9d0dffc45d61e7aeaf6553"}, + "ecto_sql": {:hex, :ecto_sql, "3.6.1", "8774dc3fc0ff7b6be510858b99883640f990c0736b8ab54588f9a0c91807f909", [:mix], [{:db_connection, "~> 2.2", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.6.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.4.0 or ~> 0.5.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.15.0 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "66f35c3f2d5978b6bffebd1e6351ab8c9d6b68650d62abd1ab8d149de40e0779"}, "file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"}, "jason": {:hex, :jason, "1.2.2", "ba43e3f2709fd1aa1dce90aaabfd039d000469c05c56f0b8e31978e03fa39052", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "18a228f5f0058ee183f29f9eae0805c6e59d61c3b006760668d8d18ff0d12179"}, - "mime": {:hex, :mime, "1.5.0", "203ef35ef3389aae6d361918bf3f952fa17a09e8e43b5aa592b93eba05d0fb8d", [:mix], [], "hexpm", "55a94c0f552249fc1a3dd9cd2d3ab9de9d3c89b559c2bd01121f824834f24746"}, - "phoenix": {:hex, :phoenix, "1.5.7", "2923bb3af924f184459fe4fa4b100bd25fa6468e69b2803dfae82698269aa5e0", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_html, "~> 2.13", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.0", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:plug, "~> 1.10", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 1.0 or ~> 2.2", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.1.2 or ~> 1.2", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "774cd64417c5a3788414fdbb2be2eb9bcd0c048d9e6ad11a0c1fd67b7c0d0978"}, + "mime": {:hex, :mime, "1.6.0", "dabde576a497cef4bbdd60aceee8160e02a6c89250d6c0b29e56c0dfb00db3d2", [:mix], [], "hexpm", "31a1a8613f8321143dde1dafc36006a17d28d02bdfecb9e95a880fa7aabd19a7"}, + "phoenix": {:hex, 
:phoenix, "1.5.9", "a6368d36cfd59d917b37c44386e01315bc89f7609a10a45a22f47c007edf2597", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_html, "~> 2.13 or ~> 3.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.0", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:plug, "~> 1.10", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 1.0 or ~> 2.2", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.1.2 or ~> 1.2", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7e4bce20a67c012f1fbb0af90e5da49fa7bf0d34e3a067795703b74aef75427d"}, "phoenix_ecto": {:hex, :phoenix_ecto, "4.2.1", "13f124cf0a3ce0f1948cf24654c7b9f2347169ff75c1123f44674afee6af3b03", [:mix], [{:ecto, "~> 3.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.14.2 or ~> 2.15", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "478a1bae899cac0a6e02be1deec7e2944b7754c04e7d4107fc5a517f877743c0"}, "phoenix_html": {:hex, :phoenix_html, "2.14.3", "51f720d0d543e4e157ff06b65de38e13303d5778a7919bcc696599e5934271b8", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "efd697a7fff35a13eeeb6b43db884705cba353a1a41d127d118fda5f90c8e80f"}, - "phoenix_live_reload": {:hex, :phoenix_live_reload, "1.3.0", "f35f61c3f959c9a01b36defaa1f0624edd55b87e236b606664a556d6f72fd2e7", [:mix], [{:file_system, "~> 0.2.1 or ~> 0.3", [hex: :file_system, repo: "hexpm", optional: false]}, {:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}], "hexpm", "02c1007ae393f2b76ec61c1a869b1e617179877984678babde131d716f95b582"}, + "phoenix_live_reload": {:hex, :phoenix_live_reload, "1.3.1", "9eba6ad16bd80c45f338b2059c7b255ce30784d76f4181304e7b78640e5a7513", [:mix], [{:file_system, "~> 0.2.1 or ~> 0.3", [hex: :file_system, repo: "hexpm", optional: false]}, {:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}], "hexpm", "f3ae26b5abb85a1cb2bc8bb199e29fbcefb34259e469b31fe0c6323f2175a5ef"}, "phoenix_pubsub": {:hex, :phoenix_pubsub, "2.0.0", "a1ae76717bb168cdeb10ec9d92d1480fec99e3080f011402c0a2d68d47395ffb", [:mix], [], "hexpm", "c52d948c4f261577b9c6fa804be91884b381a7f8f18450c5045975435350f771"}, - "plug": {:hex, :plug, "1.11.0", "f17217525597628298998bc3baed9f8ea1fa3f1160aa9871aee6df47a6e4d38e", [:mix], [{:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "2d9c633f0499f9dc5c2fd069161af4e2e7756890b81adcbb2ceaa074e8308876"}, - "plug_cowboy": {:hex, :plug_cowboy, "2.4.1", "779ba386c0915027f22e14a48919a9545714f849505fa15af2631a0d298abf0f", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "d72113b6dff7b37a7d9b2a5b68892808e3a9a752f2bf7e503240945385b70507"}, - "plug_crypto": {:hex, :plug_crypto, "1.2.1", "5c854427528bf61d159855cedddffc0625e2228b5f30eff76d5a4de42d896ef4", [:mix], [], "hexpm", "6961c0e17febd9d0bfa89632d391d2545d2e0eb73768f5f50305a23961d8782c"}, - "postgrex": {:hex, 
:postgrex, "0.15.8", "f5e782bbe5e8fa178d5e3cd1999c857dc48eda95f0a4d7f7bd92a50e84a0d491", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "698fbfacea34c4cf22c8281abeb5cf68d99628d541874f085520ab3b53d356fe"}, - "ranch": {:hex, :ranch, "1.7.1", "6b1fab51b49196860b733a49c07604465a47bdb78aa10c1c16a3d199f7f8c881", [:rebar3], [], "hexpm", "451d8527787df716d99dc36162fca05934915db0b6141bbdac2ea8d3c7afc7d7"}, - "telemetry": {:hex, :telemetry, "0.4.2", "2808c992455e08d6177322f14d3bdb6b625fbcfd233a73505870d8738a2f4599", [:rebar3], [], "hexpm", "2d1419bd9dda6a206d7b5852179511722e2b18812310d304620c7bd92a13fcef"}, + "plug": {:hex, :plug, "1.11.1", "f2992bac66fdae679453c9e86134a4201f6f43a687d8ff1cd1b2862d53c80259", [:mix], [{:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "23524e4fefbb587c11f0833b3910bfb414bf2e2534d61928e920f54e3a1b881f"}, + "plug_cowboy": {:hex, :plug_cowboy, "2.5.0", "51c998f788c4e68fc9f947a5eba8c215fbb1d63a520f7604134cab0270ea6513", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "5b2c8925a5e2587446f33810a58c01e66b3c345652eeec809b76ba007acde71a"}, + "plug_crypto": {:hex, :plug_crypto, "1.2.2", "05654514ac717ff3a1843204b424477d9e60c143406aa94daf2274fdd280794d", [:mix], [], "hexpm", "87631c7ad914a5a445f0a3809f99b079113ae4ed4b867348dd9eec288cecb6db"}, + "postgrex": {:hex, :postgrex, "0.15.9", "46f8fe6f25711aeb861c4d0ae09780facfdf3adbd2fb5594ead61504dd489bda", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "610719103e4cb2223d4ab78f9f0f3e720320eeca6011415ab4137ddef730adee"}, + "ranch": {:hex, :ranch, "1.8.0", "8c7a100a139fd57f17327b6413e4167ac559fbc04ca7448e9be9057311597a1d", [:make, :rebar3], [], "hexpm", "49fbcfd3682fab1f5d109351b61257676da1a2fdbe295904176d5e521a2ddfe5"}, + "telemetry": {:hex, :telemetry, "0.4.3", "a06428a514bdbc63293cd9a6263aad00ddeb66f608163bdec7c8995784080818", [:rebar3], [], "hexpm", "eb72b8365ffda5bed68a620d1da88525e326cb82a75ee61354fc24b844768041"}, } diff --git a/frameworks/Elixir/phoenix/phoenix.dockerfile b/frameworks/Elixir/phoenix/phoenix.dockerfile index c6fc03a04c2..79dbaa3042a 100644 --- a/frameworks/Elixir/phoenix/phoenix.dockerfile +++ b/frameworks/Elixir/phoenix/phoenix.dockerfile @@ -1,4 +1,4 @@ -FROM elixir:1.11.3 +FROM elixir:1.11.4 WORKDIR /phoenix diff --git a/frameworks/Go/gin/benchmark_config.json b/frameworks/Go/gin/benchmark_config.json index 023dca5139a..4c227388ff1 100644 --- a/frameworks/Go/gin/benchmark_config.json +++ b/frameworks/Go/gin/benchmark_config.json @@ -46,6 +46,28 @@ "display_name": "gin-scratch", "notes": "", "versus": "go" + }, + 
"gorm": { + "db_url": "/db", + "json_url": "/json", + "query_url": "/queries?queries=", + "update_url": "/updates?queries=", + "plaintext_url": "/plaintext", + "port": 8080, + "approach": "Realistic", + "classification": "Fullstack", + "database": "postgres", + "framework": "gin-gorm", + "language": "Go", + "flavor": "None", + "orm": "Full", + "platform": "None", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "gin-gorm", + "notes": "", + "versus": "go" } }] } diff --git a/frameworks/Go/gin/gin-gorm.dockerfile b/frameworks/Go/gin/gin-gorm.dockerfile new file mode 100644 index 00000000000..c9ec686aeb3 --- /dev/null +++ b/frameworks/Go/gin/gin-gorm.dockerfile @@ -0,0 +1,15 @@ +FROM golang as build-env + +WORKDIR /src/ +ADD ./gin-gorm /src/ + +#- original submission +RUN go build -o app +#RUN go build -tags=jsoniter -o app - tryed this but slower on my pc + +FROM gcr.io/distroless/base:debug + +ENV GIN_MODE=release + +COPY --from=build-env /src/app /app +ENTRYPOINT ["/app"] \ No newline at end of file diff --git a/frameworks/Go/gin/gin-gorm/README.md b/frameworks/Go/gin/gin-gorm/README.md new file mode 100755 index 00000000000..f100897cc3e --- /dev/null +++ b/frameworks/Go/gin/gin-gorm/README.md @@ -0,0 +1,43 @@ +# Gin-Gorm Benchmarking Test + +### Test Type Implementation Source Code + +* [JSON](main.go) +* [PLAINTEXT](main.go) +* [DB](main.go) +* [QUERY](main.go) +* [CACHED QUERY](not implemented) +* [UPDATE](main.go) +* [FORTUNES](not implemented) + +## Important Libraries +The tests were run with techempower suite + +## Test URLs +### JSON + +http://localhost:8080/json + +### PLAINTEXT + +http://localhost:8080/plaintext + +### DB + +http://localhost:8080/db + +### QUERY + +http://localhost:8080/queries?queries= + +### CACHED QUERY + +NA + +### UPDATE + +http://localhost:8080/updates?queries= + +### FORTUNES + +NA diff --git a/frameworks/Go/gin/gin-gorm/go.mod b/frameworks/Go/gin/gin-gorm/go.mod new file mode 100644 index 00000000000..8dddcea9df2 --- /dev/null +++ b/frameworks/Go/gin/gin-gorm/go.mod @@ -0,0 +1,19 @@ +module gin-gorm/main + +go 1.15 + +require ( + github.com/gin-gonic/gin v1.6.3 + github.com/go-playground/validator/v10 v10.4.2 // indirect + github.com/golang/protobuf v1.5.2 // indirect + github.com/json-iterator/go v1.1.10 // indirect + github.com/leodido/go-urn v1.2.1 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.1 // indirect + github.com/ugorji/go v1.2.5 // indirect + golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2 // indirect + golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + gorm.io/driver/postgres v1.0.8 + gorm.io/gorm v1.21.6 +) diff --git a/frameworks/Go/gin/gin-gorm/go.sum b/frameworks/Go/gin/gin-gorm/go.sum new file mode 100644 index 00000000000..b3b17ed3bb0 --- /dev/null +++ b/frameworks/Go/gin/gin-gorm/go.sum @@ -0,0 +1,242 @@ +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I= +github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= +github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/creack/pty v1.1.7/go.mod 
h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= +github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= +github.com/gin-gonic/gin v1.6.3 h1:ahKqKTFpO5KTPHxWZjEdPScmYaGtLo8Y4DMHoEsnp14= +github.com/gin-gonic/gin v1.6.3/go.mod h1:75u5sXoLsGZoRN5Sgbi1eraJ4GU3++wFwWzhwvtwp4M= +github.com/go-playground/assert/v2 v2.0.1 h1:MsBgLAaY856+nPRTKrp3/OZK38U/wa0CcBYNjji3q3A= +github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.13.0 h1:HyWk6mgj5qFqCT5fjGBuRArbVDfE4hi8+e8ceBS/t7Q= +github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= +github.com/go-playground/universal-translator v0.17.0 h1:icxd5fm+REJzpZx7ZfpaD876Lmtgy7VtROAbHHXk8no= +github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= +github.com/go-playground/validator/v10 v10.2.0 h1:KgJ0snyC2R9VXYN2rneOtQcw5aHQB1Vv0sFl1UcHBOY= +github.com/go-playground/validator/v10 v10.2.0/go.mod h1:uOYAAleCW8F/7oMFd6aG0GOhaH6EGOAJShg8Id5JGkI= +github.com/go-playground/validator/v10 v10.4.2 h1:RqFe5MzGf2UOFhxQYnjHabHOT6CLbYWkeXOfcXB7fsM= +github.com/go-playground/validator/v10 v10.4.2/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/gofrs/uuid v3.2.0+incompatible h1:y12jRkkFxsd7GpqdSZ+/KCs/fJbqpEXSGd4+jfEaewE= +github.com/gofrs/uuid v3.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/golang/protobuf v1.3.3 h1:gyjaxf+svBWX08ZjK86iN9geUJF0H6gp2IRKX6Nf6/I= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/jackc/chunkreader v1.0.0 h1:4s39bBR8ByfqH+DKm8rQA3E1LHZWB9XWcrz8fqaZbe0= +github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo= +github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= +github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8= +github.com/jackc/chunkreader/v2 v2.0.1/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= +github.com/jackc/pgconn v0.0.0-20190420214824-7e0022ef6ba3/go.mod h1:jkELnwuX+w9qN5YIfX0fl88Ehu4XC3keFuOJJk9pcnA= +github.com/jackc/pgconn v0.0.0-20190824142844-760dd75542eb/go.mod h1:lLjNuW/+OfW9/pnVKPazfWOgNfH2aPem8YQ7ilXGvJE= +github.com/jackc/pgconn v0.0.0-20190831204454-2fabfa3c18b7/go.mod h1:ZJKsE/KZfsUgOEh9hBm+xYTstcNHg7UPMVJqRfQxq4s= +github.com/jackc/pgconn v1.4.0/go.mod 
h1:Y2O3ZDF0q4mMacyWV3AstPJpeHXWGEetiFttmq5lahk= +github.com/jackc/pgconn v1.5.0/go.mod h1:QeD3lBfpTFe8WUnPZWN5KY/mB8FGMIYRdd8P8Jr0fAI= +github.com/jackc/pgconn v1.5.1-0.20200601181101-fa742c524853/go.mod h1:QeD3lBfpTFe8WUnPZWN5KY/mB8FGMIYRdd8P8Jr0fAI= +github.com/jackc/pgconn v1.8.0 h1:FmjZ0rOyXTr1wfWs45i4a9vjnjWUAGpMuQLD9OSs+lw= +github.com/jackc/pgconn v1.8.0/go.mod h1:1C2Pb36bGIP9QHGBYCjnyhqu7Rv3sGshaQUvmfGIB/o= +github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE= +github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8= +github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2 h1:JVX6jT/XfzNqIjye4717ITLaNwV9mWbJx0dLCpcRzdA= +github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgproto3 v1.1.0 h1:FYYE4yRw+AgI8wXIinMlNjBbp/UitDJwfj5LqqewP1A= +github.com/jackc/pgproto3 v1.1.0/go.mod h1:eR5FA3leWg7p9aeAqi37XOTgTIbkABlvcPB3E5rlc78= +github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod h1:bhq50y+xrl9n5mRYyCBFKkpRVTLYJVWeCc+mEAI3yXA= +github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg= +github.com/jackc/pgproto3/v2 v2.0.0-rc3/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= +github.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= +github.com/jackc/pgproto3/v2 v2.0.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgproto3/v2 v2.0.6 h1:b1105ZGEMFe7aCvrT1Cca3VoVb4ZFMaFJLJcg/3zD+8= +github.com/jackc/pgproto3/v2 v2.0.6/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgservicefile v0.0.0-20200307190119-3430c5407db8/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E= +github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b h1:C8S2+VttkHFdOOCXJe+YGfa4vHYwlt4Zx+IVXQ97jYg= +github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E= +github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg= +github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc= +github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw= +github.com/jackc/pgtype v1.2.0/go.mod h1:5m2OfMh1wTK7x+Fk952IDmI4nw3nPrvtQdM0ZT4WpC0= +github.com/jackc/pgtype v1.3.1-0.20200510190516-8cd94a14c75a/go.mod h1:vaogEUkALtxZMCH411K+tKzNpwzCKU+AnPzBKZ+I+Po= +github.com/jackc/pgtype v1.3.1-0.20200606141011-f6355165a91c/go.mod h1:cvk9Bgu/VzJ9/lxTO5R5sf80p0DiucVtN7ZxvaC4GmQ= +github.com/jackc/pgtype v1.6.2 h1:b3pDeuhbbzBYcg5kwNmNDun4pFUD/0AAr1kLXZLeNt8= +github.com/jackc/pgtype v1.6.2/go.mod h1:JCULISAZBFGrHaOXIIFiyfzW5VY0GRitRr8NeJsrdig= +github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y= +github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM= +github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc= +github.com/jackc/pgx/v4 v4.5.0/go.mod h1:EpAKPLdnTorwmPUUsqrPxy5fphV18j9q3wrfRXgo+kA= 
+github.com/jackc/pgx/v4 v4.6.1-0.20200510190926-94ba730bb1e9/go.mod h1:t3/cdRQl6fOLDxqtlyhe9UWgfIi9R8+8v8GKV5TRA/o= +github.com/jackc/pgx/v4 v4.6.1-0.20200606145419-4e5062306904/go.mod h1:ZDaNWkt9sW1JMiNn0kdYBaLelIhw7Pg4qd+Vk6tw7Hg= +github.com/jackc/pgx/v4 v4.10.1 h1:/6Q3ye4myIj6AaplUm+eRcz4OhK9HAvFf4ePsG40LJY= +github.com/jackc/pgx/v4 v4.10.1/go.mod h1:QlrWebbs3kqEZPHCTGyxecvzG6tvIsYu+A5b1raylkA= +github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v1.1.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v1.1.1/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= +github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= +github.com/jinzhu/now v1.1.1/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= +github.com/jinzhu/now v1.1.2 h1:eVKgfIdy9b6zbWBMgFpfDPoAMifwSZagU9HmEU6zgiI= +github.com/jinzhu/now v1.1.2/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= +github.com/json-iterator/go v1.1.9 h1:9yzud/Ht36ygwatGx56VwCZtlI/2AD15T1X2sjSuGns= +github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68= +github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= +github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y= +github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= +github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w= +github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= +github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.3.0 h1:/qkRGz8zljWiDcFvgpwUpwIAPu3r07TDvs3Rws+o/pU= +github.com/lib/pq v1.3.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= +github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-isatty v0.0.5/go.mod 
h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= +github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 h1:Esafd1046DLDQ0W1YjYsBW+p8U2u7vzgW2SQVmlNazg= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= +github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= +github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc= +github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= +github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= +github.com/shopspring/decimal v0.0.0-20200227202807-02e2044944cc h1:jUIKcSPO9MoMJBbEoyE/RJoE8vz7Mb8AjvifMMwSyvY= +github.com/shopspring/decimal v0.0.0-20200227202807-02e2044944cc/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/ugorji/go v1.1.7 
h1:/68gy2h+1mWMrwZFeD1kQialdSzAb432dtpeJ42ovdo= +github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw= +github.com/ugorji/go v1.2.5 h1:NozRHfUeEta89taVkyfsDVSy2f7v89Frft4pjnWuGuc= +github.com/ugorji/go v1.2.5/go.mod h1:gat2tIT8KJG8TVI8yv77nEO/KYT6dV7JE1gfUa8Xuls= +github.com/ugorji/go/codec v1.1.7 h1:2SvQaVZ1ouYrrKKwoSk2pzd4A9evlKJb9oTL+OaLUSs= +github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY= +github.com/ugorji/go/codec v1.2.5 h1:8WobZKAk18Msm2CothY2jnztY56YVY8kF1oQrj21iis= +github.com/ugorji/go/codec v1.2.5/go.mod h1:QPxoTbPKSEAlAHPYt02++xp/en9B/wUdwFCz+hj5caA= +github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= +go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= +go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= +go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9 h1:psW17arqaxU48Z5kZ0CQnkZWQJsqcURM6tKiBApRjXI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2 h1:It14KIkyBFYkHkwZ7k45minvA9aorojkyjGk9KJ5B/w= +golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod 
h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42 h1:vEOn+mP2zCOVzKckCZy6YsCtDblrpj/w7B9nxGNELpg= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57 h1:F5Gozwx4I1xtr/sr/8CFbb57iKi3297KFs0QDbGN60A= +golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod 
h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gorm.io/driver/postgres v1.0.8 h1:PAgM+PaHOSAeroTjHkCHCBIHHoBIf9RgPWGo8dF2DA8= +gorm.io/driver/postgres v1.0.8/go.mod h1:4eOzrI1MUfm6ObJU/UcmbXyiHSs8jSwH95G5P5dxcAg= +gorm.io/gorm v1.20.12/go.mod h1:0HFTzE/SqkGTzK6TlDPPQbAYCluiVvhzoA1+aVyzenw= +gorm.io/gorm v1.21.6 h1:xEFbH7WShsnAM+HeRNv7lOeyqmDAK+dDnf1AMf/cVPQ= +gorm.io/gorm v1.21.6/go.mod h1:F+OptMscr0P2F2qU97WT1WimdH9GaQPoDW7AYd5i2Y0= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= diff --git a/frameworks/Go/gin/gin-gorm/main.go b/frameworks/Go/gin/gin-gorm/main.go new file mode 100644 index 00000000000..6974b056e60 --- /dev/null +++ b/frameworks/Go/gin/gin-gorm/main.go @@ -0,0 +1,189 @@ +package main + +import ( + "fmt" + "math/rand" + "net/http" + "time" + + "github.com/gin-gonic/gin" + postgres "gorm.io/driver/postgres" + "gorm.io/gorm" + "gorm.io/gorm/logger" +) + +// World represents an entry int the World table +type World struct { + ID int64 `json:"id"` + RandomNumber int64 `json:"randomNumber" gorm:"column:randomnumber"` +} + +// Override GORM convention for table mapping +// TableName overrides the table name used by User to `profiles` +func (World) TableName() string { + return "World" +} + +// implements the basic logic behind the query tests +func getWorld(db *gorm.DB) World { + //we could actually precompute a list of random + //numbers and slice them but this makes no sense + //as I expect that this 'random' is just a placeholder + //for an actual business logic + randomId := rand.Intn(10000) + 1 + + var world World + db.Take(&world, randomId) + + return world +} + +// implements the logic behind the updates tests +func processWorld(tx *gorm.DB) (World, error) { + //we could actually precompute a list of random + //numbers and slice them but this makes no sense + //as I expect that this 'random' is just a placeholder + //for an actual business logic in a real test + randomId := rand.Intn(10000) + 1 + randomId2 := int64(rand.Intn(10000) + 1) + + var world World + tx.Take(&world, randomId) + + world.RandomNumber = randomId2 + err := tx.Save(&world).Error + + return world, err +} + +func main() { + /* SETUP DB AND WEB SERVER */ + + dsn := "host=tfb-database user=benchmarkdbuser password=benchmarkdbpass dbname=hello_world port=5432 sslmode=disable" + db, err := gorm.Open(postgres.Open(dsn), &gorm.Config{ + PrepareStmt: true, // use prep statements + Logger: logger.Default.LogMode(logger.Silent), // new, not inserted in original submission 2x on 
query
+    })
+
+    if err != nil {
+        panic("failed to connect database")
+    }
+
+    sqlDB, err := db.DB()
+    if err != nil {
+        panic("failed to get underlying db conn pooling struct")
+    }
+
+    // SetMaxIdleConns sets the maximum number of connections in the idle connection pool.
+    sqlDB.SetMaxIdleConns(500)
+
+    //r := gin.Default() // use default middleware - original submission
+    r := gin.New() // use no middleware, causes 1.83x on plaintext (pure gin still at +14% on both plaintext and json)
+
+    // setup middleware to add server header
+    // this slows things a little bit but it is the best design decision
+    serverHeader := []string{"Gin-gorm"}
+    r.Use(func(c *gin.Context) {
+        c.Writer.Header()["Server"] = serverHeader
+    })
+
+    /* START TESTS */
+
+    // JSON TEST
+    r.GET("/json", func(c *gin.Context) {
+        //c.Header("Server", "example") - original submission now using middleware
+        c.JSON(200, gin.H{"message": "Hello, World!"})
+    })
+
+    // PLAINTEXT TEST
+    r.GET("/plaintext", func(c *gin.Context) {
+        //c.Header("Server", "example") - original submission now using middleware
+        c.String(200, "Hello, World!")
+    })
+
+    // SINGLE QUERY
+    r.GET("/db", func(c *gin.Context) {
+        world := getWorld(db)
+
+        //c.Header("Server", "example") - original submission now using middleware
+        c.JSON(200, world)
+    })
+
+    type NumOf struct {
+        Queries int `form:"queries"`
+    }
+
+    // MULTIPLE QUERIES
+    r.GET("/queries", func(c *gin.Context) {
+        var numOf NumOf
+
+        if c.ShouldBindQuery(&numOf) != nil { // manage missing query num
+            numOf.Queries = 1
+
+        } else if numOf.Queries < 1 { //set at least 1
+            numOf.Queries = 1
+
+        } else if numOf.Queries > 500 { //set no more than 500
+            numOf.Queries = 500
+        }
+
+        worlds := make([]World, numOf.Queries, numOf.Queries) //prealloc
+
+        //original submission with go routines, seems faster than without...
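(Editor's note, not part of the patch: the `/queries` handler above clamps the `queries` parameter to between 1 and 500 as the benchmark rules require, then, in the hunk that continues below, fans each lookup out to its own goroutine and collects the rows from a buffered channel. The following is a minimal, self-contained sketch of that clamp-and-fan-out pattern; `fetchWorld` and `clampQueries` are illustrative stand-ins for the patch's `getWorld(db)` and its inline bounds checks, not functions from this repository.)

```go
package main

import (
	"fmt"
	"math/rand"
)

// World mirrors the row shape used by the gin-gorm test.
type World struct {
	ID           int64
	RandomNumber int64
}

// fetchWorld stands in for the patch's getWorld(db); it fabricates a row
// so this sketch runs without a database.
func fetchWorld() World {
	return World{
		ID:           int64(rand.Intn(10000) + 1),
		RandomNumber: int64(rand.Intn(10000) + 1),
	}
}

// clampQueries mirrors the bounds checks in the handler: a missing or
// invalid value becomes 1, anything above 500 becomes 500.
func clampQueries(n int, ok bool) int {
	switch {
	case !ok, n < 1:
		return 1
	case n > 500:
		return 500
	default:
		return n
	}
}

func main() {
	n := clampQueries(20, true)

	// One goroutine per query; results are drained from a buffered channel,
	// as the handler does with getWorld(db) in the hunk that follows.
	ch := make(chan World, n)
	for i := 0; i < n; i++ {
		go func() { ch <- fetchWorld() }()
	}

	worlds := make([]World, n)
	for i := 0; i < n; i++ {
		worlds[i] = <-ch
	}
	fmt.Printf("fetched %d rows, first id=%d\n", len(worlds), worlds[0].ID)
}
```

Because every goroutine sends a complete row, the order in which results are drained from the channel does not affect the response body, so no extra synchronization beyond the channel is needed.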
+ channel := make(chan World, numOf.Queries) + + for i := 0; i < numOf.Queries; i++ { + go func() { channel <- getWorld(db) }() + } + + for i := 0; i < numOf.Queries; i++ { + worlds[i] = <-channel + } + + //c.Header("Server", "example") - original submission now using middleware + c.JSON(200, worlds) + }) + + // MULTIPLE UPDATES + r.GET("/updates", func(c *gin.Context) { + var numOf NumOf + + if c.ShouldBindQuery(&numOf) != nil { // manage missing query num + numOf.Queries = 1 + + } else if numOf.Queries < 1 { //set at least 1 + numOf.Queries = 1 + + } else if numOf.Queries > 500 { //set no more than 500 + numOf.Queries = 500 + } + + worlds := make([]World, numOf.Queries, numOf.Queries) //prealloc + var err error = nil + + //c.Header("Server", "example") - original submission now using middleware + + for i := 0; i < numOf.Queries; i++ { + worlds[i], err = processWorld(db) + + if err != nil { + fmt.Println(err) + c.JSON(500, gin.H{"error": err}) + break + } + } + + c.JSON(200, worlds) + }) + + /* START SERVICE */ + + s := &http.Server{ + Addr: ":8080", + Handler: r, + ReadTimeout: 100000 * time.Second, //increase keepalive + WriteTimeout: 100000 * time.Second, + } + + s.ListenAndServe() // listen and serve on 0.0.0.0:8080 +} diff --git a/frameworks/Haskell/ihp/src/Config/nix/nixpkgs-config.nix b/frameworks/Haskell/ihp/src/Config/nix/nixpkgs-config.nix index dcaf772e4e8..96d50877dea 100644 --- a/frameworks/Haskell/ihp/src/Config/nix/nixpkgs-config.nix +++ b/frameworks/Haskell/ihp/src/Config/nix/nixpkgs-config.nix @@ -1,101 +1,6 @@ +# See https://ihp.digitallyinduced.com/Guide/package-management.html { ihp }: - -let - dontCheckPackages = [ - "cabal-helper" - "generic-lens" - "filesystem-conduit" - "tz" - "typerep-map" - "trifecta" - "hackage-security" - ]; - - doJailbreakPackages = [ - "filesystem-conduit" - "http-media" - "aeson" - "wreq" - "ghcide" - "brittany" - ]; - - dontHaddockPackages = []; - - nixPkgsRev = "c985bf793e6ab7d54a9182381b4b610fe0ae6936"; - nixPkgsSha256 = "0zsj9imjbnhkb65r169xxqmjgqd5593insrvncvabg1iqdsrcxz1"; - - compiler = "ghc883"; - - generatedOverrides = haskellPackagesNew: haskellPackagesOld: - let - toPackage = dir: file: _: { - name = builtins.replaceStrings [ ".nix" ] [ "" ] file; - - value = haskellPackagesNew.callPackage ("${dir}/${file}") {}; - }; - makePackageSet = dir: pkgs.lib.mapAttrs' (toPackage dir) (builtins.readDir dir); - in - { "ihp" = ((haskellPackagesNew.callPackage "${ihp}/ihp.nix") { }); } // (makePackageSet ./haskell-packages/.) 
// (makePackageSet "${ihp}/NixSupport/haskell-packages/."); - - makeOverrides = - function: names: haskellPackagesNew: haskellPackagesOld: - let - toPackage = name: { - inherit name; - - value = function haskellPackagesOld.${name}; - }; - in - builtins.listToAttrs (map toPackage names); - - composeExtensionsList = pkgs.lib.fold pkgs.lib.composeExtensions (_: _: {}); - - - # More exotic overrides go here - manualOverrides = haskellPackagesNew: haskellPackagesOld: { - haskell-language-server = haskellPackagesOld.haskell-language-server.overrideScope ( self: super: { aeson = pkgs.haskell.lib.dontCheck haskellPackagesNew.aeson_1_5_2_0; } ); - hls-plugin-api = haskellPackagesOld.hls-plugin-api.overrideScope ( self: super: { aeson = pkgs.haskell.lib.dontCheck haskellPackagesNew.aeson_1_5_2_0; } ); - yaml = haskellPackagesOld.yaml.overrideScope ( self: super: { aeson = pkgs.haskell.lib.dontCheck haskellPackagesNew.aeson_1_5_2_0; } ); - lsp-test = haskellPackagesOld.lsp-test.overrideScope ( self: super: { aeson = pkgs.haskell.lib.dontCheck haskellPackagesNew.aeson_1_5_2_0; } ); - haskell-lsp-types = haskellPackagesOld.haskell-lsp-types.overrideScope ( self: super: { aeson = pkgs.haskell.lib.dontCheck haskellPackagesNew.aeson_1_5_2_0; } ); - haskell-lsp = haskellPackagesOld.haskell-lsp.overrideScope ( self: super: { aeson = pkgs.haskell.lib.dontCheck haskellPackagesNew.aeson_1_5_2_0; } ); - aeson-pretty = haskellPackagesOld.aeson-pretty.overrideScope ( self: super: { aeson = pkgs.haskell.lib.dontCheck haskellPackagesNew.aeson_1_5_2_0; } ); - aeson = pkgs.haskell.lib.dontCheck haskellPackagesOld.aeson_1_5_2_0; - }; - - #mkDerivation = args: super.mkDerivation (args // { - # enableLibraryProfiling = true; - #}); - config = { - allowBroken = true; - packageOverrides = pkgs: rec { - haskell = pkgs.haskell // { - packages = pkgs.haskell.packages // { - "${compiler}" = - pkgs.haskell.packages."${compiler}".override { - overrides = composeExtensionsList [ - generatedOverrides - (makeOverrides pkgs.haskell.lib.dontCheck dontCheckPackages ) - (makeOverrides pkgs.haskell.lib.doJailbreak doJailbreakPackages) - (makeOverrides pkgs.haskell.lib.dontHaddock dontHaddockPackages) - manualOverrides - ]; - }; - } - ; - } - ; - }; - }; - - - pkgs = (import ((import {}).fetchFromGitHub { - owner = "NixOS"; - repo = "nixpkgs"; - rev = nixPkgsRev; - sha256 = nixPkgsSha256; - })) { inherit config; }; - -in -pkgs +import "${toString ihp}/NixSupport/make-nixpkgs-from-options.nix" { + ihp = ihp; + haskellPackagesDir = ./haskell-packages/.; +} \ No newline at end of file diff --git a/frameworks/Haskell/ihp/src/Main.hs b/frameworks/Haskell/ihp/src/Main.hs index 2da49fc0382..afc4e3a2195 100755 --- a/frameworks/Haskell/ihp/src/Main.hs +++ b/frameworks/Haskell/ihp/src/Main.hs @@ -7,11 +7,15 @@ import IHP.RouterSupport import IHP.FrameworkConfig import Web.FrontController import Web.Types +import IHP.Job.Types instance FrontController RootApplication where controllers = [ mountFrontController WebApplication ] +instance Worker RootApplication where + workers _ = [] + main :: IO () -main = IHP.Server.run config +main = IHP.Server.run config \ No newline at end of file diff --git a/frameworks/Haskell/ihp/src/default.nix b/frameworks/Haskell/ihp/src/default.nix index 2c566f00b19..0e1a0abe92f 100644 --- a/frameworks/Haskell/ihp/src/default.nix +++ b/frameworks/Haskell/ihp/src/default.nix @@ -1,7 +1,7 @@ let ihp = builtins.fetchGit { url = "https://github.com/digitallyinduced/ihp.git"; - rev = "d02a0699220a87d32889ff2a7b87ad81f8bc8195"; 
+ ref = "refs/tags/v0.9.0"; }; haskellEnv = import "${ihp}/NixSupport/default.nix" { ihp = ihp; diff --git a/frameworks/Java/magician-io/README.md b/frameworks/Java/magician-io/README.md new file mode 100644 index 00000000000..59dafa64d82 --- /dev/null +++ b/frameworks/Java/magician-io/README.md @@ -0,0 +1,15 @@ +## Magician Benchmarking Test + +This is Magician's official website address[http://magician-io.com](http://magician-io.com) + +## Versions + +- Java OpenJDK 1.8 +- Martian 3.2.13 + +##Test URLs +### JSON Encoding Test +http://localhost:8080/json + +### Plaintext Encoding Test +http://localhost:8080/plaintext \ No newline at end of file diff --git a/frameworks/Java/magician-io/benchmark_config.json b/frameworks/Java/magician-io/benchmark_config.json new file mode 100644 index 00000000000..8a7db88fe96 --- /dev/null +++ b/frameworks/Java/magician-io/benchmark_config.json @@ -0,0 +1,24 @@ +{ + "framework": "magician", + "tests": [{ + "default": { + "json_url": "/json", + "plaintext_url": "/plaintext", + "port": 8080, + "approach": "Realistic", + "classification": "Platform", + "database": "None", + "framework": "magician", + "language": "Java", + "flavor": "None", + "orm": "None", + "platform": "magician", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "magician-io", + "notes": "magician io", + "versus": "magician" + } + }] +} \ No newline at end of file diff --git a/frameworks/Java/magician-io/magician.dockerfile b/frameworks/Java/magician-io/magician.dockerfile new file mode 100644 index 00000000000..6f337eca480 --- /dev/null +++ b/frameworks/Java/magician-io/magician.dockerfile @@ -0,0 +1,14 @@ +FROM maven:3.6.1-jdk-11-slim as maven +WORKDIR /magician-io +COPY pom.xml pom.xml +COPY src src +RUN mvn package -q + +FROM openjdk:11.0.3-jdk-slim +WORKDIR /magician-io +COPY --from=maven /magician-io/target/magician-io.jar magician-io.jar +COPY --from=maven /magician-io/target/lib lib + +EXPOSE 8080 + +CMD ["java", "-jar", "magician-io.jar"] \ No newline at end of file diff --git a/frameworks/Java/magician-io/pom.xml b/frameworks/Java/magician-io/pom.xml new file mode 100644 index 00000000000..67b182e239f --- /dev/null +++ b/frameworks/Java/magician-io/pom.xml @@ -0,0 +1,74 @@ + + + 4.0.0 + + org.example + magician-io + 1.0-SNAPSHOT + + + UTF-8 + 11 + 11 + + + + + com.github.yuyenews + Magician + 1.1.10 + + + + com.alibaba + fastjson + 1.2.76 + + + + + + org.slf4j + slf4j-jdk14 + 1.7.12 + + + + + + + org.apache.maven.plugins + maven-jar-plugin + + magician-io + + + com.test.io.Start + true + ./lib/ + false + + + + + + org.apache.maven.plugins + maven-dependency-plugin + + + copy + package + + copy-dependencies + + + ${project.build.directory}/lib + + + + + + + \ No newline at end of file diff --git a/frameworks/Java/magician-io/src/main/java/com/test/io/Start.java b/frameworks/Java/magician-io/src/main/java/com/test/io/Start.java new file mode 100644 index 00000000000..6770037f765 --- /dev/null +++ b/frameworks/Java/magician-io/src/main/java/com/test/io/Start.java @@ -0,0 +1,31 @@ +package com.test.io; + +import com.test.io.handler.JsonHandler; +import com.test.io.handler.TextHandler; +import io.magician.Magician; +import io.magician.common.event.EventGroup; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.concurrent.Executors; + +public class Start { + + private static Logger logger = LoggerFactory.getLogger(Start.class); + + public static void main(String[] args) { + try { + + EventGroup ioEventGroup = new EventGroup(1, 
Executors.newCachedThreadPool()); + EventGroup workerEventGroup = new EventGroup(20, Executors.newCachedThreadPool()); + + Magician.createTCPServer(ioEventGroup, workerEventGroup) + .handler("/json", new JsonHandler()) + .handler("/plaintext", new TextHandler()) + .bind(8080, 10000); + + } catch (Exception e){ + logger.error("启动服务出现异常", e); + } + } +} diff --git a/frameworks/Java/magician-io/src/main/java/com/test/io/handler/JsonHandler.java b/frameworks/Java/magician-io/src/main/java/com/test/io/handler/JsonHandler.java new file mode 100644 index 00000000000..50345dcb2a4 --- /dev/null +++ b/frameworks/Java/magician-io/src/main/java/com/test/io/handler/JsonHandler.java @@ -0,0 +1,28 @@ +package com.test.io.handler; + +import com.alibaba.fastjson.JSON; +import com.test.io.vo.MessageVO; +import io.magician.tcp.codec.impl.http.request.MagicianRequest; +import io.magician.tcp.handler.MagicianHandler; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.Locale; + +public class JsonHandler implements MagicianHandler { + + private SimpleDateFormat simpleDateFormat = new SimpleDateFormat("E, dd MMM yyyy H:m:s z", Locale.US); + + @Override + public void request(MagicianRequest magicianRequest) { + String str = simpleDateFormat.format(new Date()); + + MessageVO messageVO = new MessageVO(); + messageVO.setMessage("Hello, World!"); + + magicianRequest.getResponse() + .setResponseHeader("Server","magician") + .setResponseHeader("Date", str) + .sendJson(200, JSON.toJSONString(messageVO)); + } +} diff --git a/frameworks/Java/magician-io/src/main/java/com/test/io/handler/TextHandler.java b/frameworks/Java/magician-io/src/main/java/com/test/io/handler/TextHandler.java new file mode 100644 index 00000000000..6b5640c8b47 --- /dev/null +++ b/frameworks/Java/magician-io/src/main/java/com/test/io/handler/TextHandler.java @@ -0,0 +1,23 @@ +package com.test.io.handler; + +import io.magician.tcp.codec.impl.http.request.MagicianRequest; +import io.magician.tcp.handler.MagicianHandler; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.Locale; + +public class TextHandler implements MagicianHandler { + + private SimpleDateFormat simpleDateFormat = new SimpleDateFormat("E, dd MMM yyyy H:m:s z", Locale.US); + + @Override + public void request(MagicianRequest magicianRequest) { + String str = simpleDateFormat.format(new Date()); + + magicianRequest.getResponse() + .setResponseHeader("Server","magician") + .setResponseHeader("Date", str) + .sendText(200, "Hello, World!"); + } +} diff --git a/frameworks/Java/magician-io/src/main/java/com/test/io/vo/MessageVO.java b/frameworks/Java/magician-io/src/main/java/com/test/io/vo/MessageVO.java new file mode 100644 index 00000000000..4e6707974cb --- /dev/null +++ b/frameworks/Java/magician-io/src/main/java/com/test/io/vo/MessageVO.java @@ -0,0 +1,14 @@ +package com.test.io.vo; + +public class MessageVO { + + private String message; + + public String getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } +} diff --git a/frameworks/Java/martian/pom.xml b/frameworks/Java/martian/pom.xml index e26a84d4acf..d7bdc8f1b23 100644 --- a/frameworks/Java/martian/pom.xml +++ b/frameworks/Java/martian/pom.xml @@ -17,8 +17,8 @@ com.github.yuyenews - mars-starter - 3.3.3 + Martian + 4.0.2 diff --git a/frameworks/Java/martian/src/main/java/com/text/Start.java b/frameworks/Java/martian/src/main/java/com/text/Start.java index c4cad535558..d32751d7a0a 100644 --- 
a/frameworks/Java/martian/src/main/java/com/text/Start.java +++ b/frameworks/Java/martian/src/main/java/com/text/Start.java @@ -1,11 +1,13 @@ package com.text; -import com.mars.start.StartMars; +import com.martian.annotation.MartianScan; +import com.martian.starter.StartMartian; import com.text.config.TestConfig; +@MartianScan(scanPackage = "com.text.api") public class Start { public static void main(String[] args) { - StartMars.start(Start.class, new TestConfig()); + StartMartian.start(Start.class, new TestConfig()); } } diff --git a/frameworks/Java/martian/src/main/java/com/text/api/TestApi.java b/frameworks/Java/martian/src/main/java/com/text/api/TestApi.java index c52da3d1666..4e1ec2a28c9 100644 --- a/frameworks/Java/martian/src/main/java/com/text/api/TestApi.java +++ b/frameworks/Java/martian/src/main/java/com/text/api/TestApi.java @@ -1,11 +1,28 @@ package com.text.api; -import com.mars.common.annotation.api.MarsApi; -import com.mars.server.server.request.HttpMarsResponse; +import com.magician.web.core.annotation.Route; import com.text.api.vo.MessageVO; +import io.magician.tcp.codec.impl.http.request.MagicianRequest; -@MarsApi(refBean = "testService") -public interface TestApi { +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.Locale; - MessageVO json(HttpMarsResponse response); +public class TestApi { + + private SimpleDateFormat simpleDateFormat = new SimpleDateFormat("E, dd MMM yyyy H:m:s z", Locale.US); + + @Route("/json") + public MessageVO json(MagicianRequest request){ + + String str = simpleDateFormat.format(new Date()); + + request.getResponse() + .setResponseHeader("Server","Martian") + .setResponseHeader("Date", str); + + MessageVO messageVO = new MessageVO(); + messageVO.setMessage("Hello, World!"); + return messageVO; + } } diff --git a/frameworks/Java/martian/src/main/java/com/text/config/TestConfig.java b/frameworks/Java/martian/src/main/java/com/text/config/TestConfig.java index 07a669aa521..789496a6189 100644 --- a/frameworks/Java/martian/src/main/java/com/text/config/TestConfig.java +++ b/frameworks/Java/martian/src/main/java/com/text/config/TestConfig.java @@ -1,24 +1,19 @@ package com.text.config; -import com.mars.common.base.config.MarsConfig; -import com.mars.common.base.config.model.ThreadPoolConfig; +import com.martian.config.MartianConfig; +import io.magician.common.event.EventGroup; -public class TestConfig extends MarsConfig { +import java.util.concurrent.Executors; + +public class TestConfig extends MartianConfig { @Override public int port() { return 8080; } - - @Override - public ThreadPoolConfig threadPoolConfig() { - ThreadPoolConfig threadPoolConfig = new ThreadPoolConfig(); - threadPoolConfig.setCorePoolSize(200); - threadPoolConfig.setMaxPoolSize(10000000); - threadPoolConfig.setKeepAliveTime(20); - threadPoolConfig.setBackLog(2000); - return threadPoolConfig; + public EventGroup workerEventGroup() { + return new EventGroup(10, Executors.newCachedThreadPool()); } } diff --git a/frameworks/Java/martian/src/main/java/com/text/service/TestService.java b/frameworks/Java/martian/src/main/java/com/text/service/TestService.java deleted file mode 100644 index 20ab9f2d428..00000000000 --- a/frameworks/Java/martian/src/main/java/com/text/service/TestService.java +++ /dev/null @@ -1,38 +0,0 @@ -package com.text.service; - -import com.mars.aio.constant.HttpConstant; -import com.mars.aio.server.impl.MarsHttpExchange; -import com.mars.common.annotation.bean.MarsBean; -import com.mars.server.server.request.HttpMarsResponse; -import 
com.text.api.vo.MessageVO; - -import java.text.SimpleDateFormat; -import java.util.Date; -import java.util.Locale; - -@MarsBean -public class TestService { - - private SimpleDateFormat simpleDateFormat = new SimpleDateFormat("E, dd MMM yyyy H:m:s z", Locale.US); - - public MessageVO json(HttpMarsResponse response){ - /* - Because this is a purely front-end and back-end separation framework, - the response header will include cross-domain by default. - In order to adapt to the tfb test, these default headers are removed here. - */ - response.geNativeResponse(MarsHttpExchange.class).getResponseHeaders().clear(); - - // Add the header required by tfb - String str = simpleDateFormat.format(new Date()); - - response.setHeader("Content-Type", HttpConstant.RESPONSE_CONTENT_TYPE); - response.setHeader("Server","Martian"); - response.setHeader("Date", str); - - - MessageVO messageVO = new MessageVO(); - messageVO.setMessage("Hello, World!"); - return messageVO; - } -} diff --git a/frameworks/Java/netty/pom.xml b/frameworks/Java/netty/pom.xml index d146a078773..4179d874f9a 100644 --- a/frameworks/Java/netty/pom.xml +++ b/frameworks/Java/netty/pom.xml @@ -11,7 +11,7 @@ 11 11 - 4.1.59.Final + 4.1.65.Final jar @@ -41,7 +41,7 @@ io.netty.incubator netty-incubator-transport-native-io_uring - 0.0.1.Final + 0.0.5.Final linux-x86_64 @@ -54,7 +54,7 @@ org.javassist javassist - 3.27.0-GA + 3.28.0-GA diff --git a/frameworks/Java/officefloor/README.md b/frameworks/Java/officefloor/README.md index a72c74c30d6..ba31070d957 100755 --- a/frameworks/Java/officefloor/README.md +++ b/frameworks/Java/officefloor/README.md @@ -1,6 +1,6 @@ # OfficeFloor -OfficeFloor provides true inversion of (coupling) control. +OfficeFloor provides [inversion of coupling control](https://dzone.com/articles/inversion-of-coupling-control). > Inversion of Control = Dependency Injection + Continuation Injection + Thread Injection @@ -35,15 +35,18 @@ More information can be found at [http://officefloor.net](http://officefloor.net # OfficeFloor Benchmark Tests -OfficeFloor can use different HTTP server components: +OfficeFloor can use different HTTP server implementations that pass to OfficeFloor inversion of coupling control for request servicing: -* **officefloor-netty** : incorporating Netty to service requests passing to OfficeFloor inversion of control. -* **officefloor-undertow** : incorporating Undertow to service requests passing to OfficeFloor inversion of control. -* **officefloor-raw** : default HTTP server component provided by OfficeFloor. This allows comparing OfficeFloor's raw HTTP implementation with other solutions focused on HTTP optimisation. +* **officefloor-netty** : using Netty +* **officefloor-undertow** : using Undertow +* **officefloor-vertx** : using Vertx -Having these comparisons allows developers to see the trade-offs in using different HTTP components to handle HTTP request servicing. +OfficeFloor can also use various database clients. These use the default HTTP server component provided by OfficeFloor: -Note: OfficeFloor's web plugins are called WoOF (Web on OfficeFloor). +* **officefloor-r2bc** : uses R2DBC. +* **officefloor-sqlclient** : uses Vertx SQL Client. + +Having these comparisons allows developers to see the trade-offs in using different HTTP components and database drivers to handle HTTP request servicing. As mentioned, OfficeFloor uses different threading models. It does not inherit the threading model imposed by the HTTP component. 
Hence, there are various threading models tested to see trade-offs: @@ -51,29 +54,12 @@ As mentioned, OfficeFloor uses different threading models. It does not inherit * **officefloor-micro** : synchronous multi-threaded model * **officefloor-thread_affinity** : thread affinity of servicing each request only on one CPU -While much focus is on HTTP handling, performance also is impacted by database interaction. The above tests use raw SQL queries to provide optimised through put. However, in "real world" applications ORMs are typically used: +In "real world" applications ORMs are typically used: * **officefloor** : uses JPA * **officefloor-spring_data** : uses Spring Data repositories -# 503 Responses - - Within the performance tests (particularly the query and update tests), there are 503 responses from OfficeFloor. These are deliberate and make OfficeFloor significantly more responsive than typical web servers. To understand why, we need to explain the servicing change introduced by OfficeFloor. - - **Typical web servers** manage load by only processing one request at a time per socket. This means on pipelined requests over a single connection, the first request needs to be fully processed before the next request is started. As requests then queue on the network buffer, it allows the network's natural TCP throttling to manage load. - - **Web browser request profiles have changed** to an increased number of asynchronous requests pipelined over a few number of connections. Days of old (possibly very old), web pages loaded and then did a post to reload the page again. There would be one page submission with a few resource requests (e.g. images). These days, single page applications are a lot more prevalent. The nature of single page applications is to avoid page reloads and run many asynchronous requests. This means more logic submission requests in parallel. As browsers re-use connections to multiplex these requests to the server, the result is pipelining requests over a few number of connections. - - **OfficeFloor processes pipelined requests in parallel**. To cater to this situation and allow more responsive single page web applications, OfficeFloor processes the pipeline requests in parallel. This means the second request in the pipeline no longer has to wait for the first one to complete. Both the first, second, third, etc will be processed immediately by OfficeFloor. This means better responsiveness to the single page applications, as there is no queuing of requests in the pipelines. - - **Load throttling managed by 503 responses.** As requests are no longer queued on the network buffer, there is no TCP throttling. Requests will be accepted in parallel into OfficeFloor. To avoid denial of service attacks, OfficeFloor has built in load throttling. It is beyond this overview discussion to explain how this works, but basically when thread pools are overloaded they reject new jobs. OfficeFloor captures these rejections and translates them into 503 HTTP responses (indicating temporarily unavailable). - - **This improves responsiveness of single page applications**. The problem with TCP throttling is that requests start to slow down before you get rejections. This means your single page application ends up waiting for responses. The wait time is noticeable by the user, as can be upwards of 10's of seconds to minutes. This then results in the user's perception of the single page application being slow. 
OfficeFloor by parallel processing everything and responding immediately with 503 responses when overloaded, reduces these long wait times. Rather than than hanging, the single page application can deal with the 503 by calling another server or providing user friendly messages back to the user that system is under load and try back in a few moments. This provides an overall more responsive and what we find is a better experience for the user. - - However, this does mean in some of the heavier load tests, you will see errors for OfficeFloor. But we here at OfficeFloor are proud of these errors, as they avoid hanging applications and provide better user experience under extreme loads. - - # OfficeFloor real benefit OfficeFloor is targeted at cloud and SOA/micro-service environments. In these environments, applications do not operate in isolation with a single database. Applications need to interact with multiple other services that result in: diff --git a/frameworks/Java/officefloor/benchmark_config.json b/frameworks/Java/officefloor/benchmark_config.json index 0adb6b647cf..df2a5b20d69 100755 --- a/frameworks/Java/officefloor/benchmark_config.json +++ b/frameworks/Java/officefloor/benchmark_config.json @@ -25,7 +25,7 @@ "notes": "", "versus": "officefloor-micro" }, - "raw": { + "r2dbc": { "json_url": "/json", "plaintext_url": "/plaintext", "db_url": "/db", @@ -44,10 +44,10 @@ "webserver": "WoOF", "os": "Linux", "database_os": "Linux", - "display_name": "OfficeFloor-raw", + "display_name": "OfficeFloor-r2dbc", "notes": "" }, - "async": { + "sqlclient": { "json_url": "/json", "plaintext_url": "/plaintext", "db_url": "/db", @@ -66,9 +66,31 @@ "webserver": "WoOF", "os": "Linux", "database_os": "Linux", + "display_name": "OfficeFloor-sqlclient", + "notes": "" + }, + "async": { + "json_url": "/json", + "plaintext_url": "/plaintext", + "db_url": "/db", + "query_url": "/queries?queries=", + "fortune_url": "/fortunes", + "update_url": "/update?queries=", + "port": 8080, + "approach": "Realistic", + "classification": "Fullstack", + "database": "Postgres", + "framework": "OfficeFloor", + "language": "Java", + "flavor": "None", + "orm": "raw", + "platform": "OfficeFloor", + "webserver": "WoOF", + "os": "Linux", + "database_os": "Linux", "display_name": "OfficeFloor-async", "notes": "", - "versus": "OfficeFloor-raw" + "versus": "OfficeFloor-r2dbc" }, "micro": { "json_url": "/json", @@ -91,7 +113,7 @@ "database_os": "Linux", "display_name": "OfficeFloor-micro", "notes": "", - "versus": "OfficeFloor-raw" + "versus": "OfficeFloor-r2dbc" }, "thread_affinity": { "json_url": "/json", @@ -114,7 +136,7 @@ "database_os": "Linux", "display_name": "OfficeFloor-thread_affinity", "notes": "", - "versus": "OfficeFloor-raw" + "versus": "OfficeFloor-r2dbc" }, "netty": { "json_url": "/json", @@ -132,12 +154,12 @@ "flavor": "None", "orm": "Full", "platform": "OfficeFloor", - "webserver": "WoOF", + "webserver": "Netty", "os": "Linux", "database_os": "Linux", "display_name": "OfficeFloor-netty", "notes": "", - "versus": "netty" + "versus": "Netty" }, "undertow": { "json_url": "/json", @@ -155,11 +177,35 @@ "flavor": "None", "orm": "Full", "platform": "OfficeFloor", - "webserver": "WoOF", + "webserver": "Undertow", "os": "Linux", "database_os": "Linux", "display_name": "OfficeFloor-undertow", - "notes": "" + "notes": "", + "versus": "undertow-postgresql" + }, + "vertx": { + "json_url": "/json", + "plaintext_url": "/plaintext", + "db_url": "/db", + "query_url": "/queries?queries=", + "fortune_url": "/fortunes", + 
"update_url": "/update?queries=", + "port": 8080, + "approach": "Realistic", + "classification": "Fullstack", + "database": "Postgres", + "framework": "OfficeFloor", + "language": "Java", + "flavor": "None", + "orm": "Full", + "platform": "OfficeFloor", + "webserver": "vertx", + "os": "Linux", + "database_os": "Linux", + "display_name": "OfficeFloor-undertow", + "notes": "", + "versus": "vertx-postgres" }, "spring_data": { "json_url": "/json", @@ -182,7 +228,7 @@ "database_os": "Linux", "display_name": "OfficeFloor-spring_data", "notes": "", - "versus": "officefloor" + "versus": "spring" } } ] diff --git a/frameworks/Java/officefloor/config.toml b/frameworks/Java/officefloor/config.toml index 4f75b2c2bdd..327f2eb9082 100644 --- a/frameworks/Java/officefloor/config.toml +++ b/frameworks/Java/officefloor/config.toml @@ -15,8 +15,8 @@ database_os = "Linux" os = "Linux" orm = "Full" platform = "OfficeFloor" -webserver = "WoOF" -versus = "netty" +webserver = "Netty" +versus = "Netty" [thread_affinity] urls.plaintext = "/plaintext" @@ -69,7 +69,24 @@ platform = "OfficeFloor" webserver = "WoOF" versus = "officefloor-micro" -[raw] +[r2dbc] +urls.plaintext = "/plaintext" +urls.json = "/json" +urls.db = "/db" +urls.query = "/queries?queries=" +urls.update = "/update?queries=" +urls.fortune = "/fortunes" +approach = "Realistic" +classification = "Platform" +database = "Postgres" +database_os = "Linux" +os = "Linux" +orm = "raw" +platform = "OfficeFloor" +webserver = "WoOF" +versus = "None" + +[sqlclient] urls.plaintext = "/plaintext" urls.json = "/json" urls.db = "/db" @@ -100,8 +117,25 @@ database_os = "Linux" os = "Linux" orm = "Full" platform = "OfficeFloor" -webserver = "WoOF" -versus = "None" +webserver = "Undertow" +versus = "undertow-postgresql" + +[vertx] +urls.plaintext = "/plaintext" +urls.json = "/json" +urls.db = "/db" +urls.query = "/queries?queries=" +urls.update = "/update?queries=" +urls.fortune = "/fortunes" +approach = "Realistic" +classification = "Fullstack" +database = "Postgres" +database_os = "Linux" +os = "Linux" +orm = "Full" +platform = "OfficeFloor" +webserver = "vertx" +versus = "vertx-postgres" [spring_data] urls.plaintext = "/plaintext" @@ -118,7 +152,7 @@ os = "Linux" orm = "Full" platform = "OfficeFloor" webserver = "WoOF" -versus = "officefloor" +versus = "spring" [async] urls.plaintext = "/plaintext" @@ -128,11 +162,11 @@ urls.query = "/queries?queries=" urls.update = "/update?queries=" urls.fortune = "/fortunes" approach = "Realistic" -classification = "Platform" +classification = "Fullstack" database = "Postgres" database_os = "Linux" os = "Linux" orm = "raw" platform = "OfficeFloor" webserver = "WoOF" -versus = "OfficeFloor-raw" +versus = "OfficeFloor-r2dbc" diff --git a/frameworks/Java/officefloor/officefloor-raw.dockerfile b/frameworks/Java/officefloor/officefloor-r2dbc.dockerfile similarity index 52% rename from frameworks/Java/officefloor/officefloor-raw.dockerfile rename to frameworks/Java/officefloor/officefloor-r2dbc.dockerfile index 28ceaeded08..b9797a0266a 100644 --- a/frameworks/Java/officefloor/officefloor-raw.dockerfile +++ b/frameworks/Java/officefloor/officefloor-r2dbc.dockerfile @@ -1,12 +1,16 @@ FROM maven:3.6.3 as maven WORKDIR /officefloor COPY src src -WORKDIR /officefloor/src/woof_benchmark_raw +WORKDIR /officefloor/src +RUN mvn -B -N clean install +WORKDIR /officefloor/src/woof_benchmark_woof +RUN mvn -B clean install +WORKDIR /officefloor/src/woof_benchmark_r2dbc RUN mvn -B clean package FROM openjdk:15-slim RUN apt-get update && apt-get 
install -y libjna-java WORKDIR /officefloor -COPY --from=maven /officefloor/src/woof_benchmark_raw/target/woof_benchmark_raw-1.0.0.jar server.jar +COPY --from=maven /officefloor/src/woof_benchmark_r2dbc/target/woof_benchmark_r2dbc-1.0.0.jar server.jar EXPOSE 8080 CMD ["java", "-Xms2g", "-Xmx2g", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-jar", "server.jar"] diff --git a/frameworks/Java/officefloor/officefloor-sqlclient.dockerfile b/frameworks/Java/officefloor/officefloor-sqlclient.dockerfile new file mode 100644 index 00000000000..e29bc666c0d --- /dev/null +++ b/frameworks/Java/officefloor/officefloor-sqlclient.dockerfile @@ -0,0 +1,16 @@ +FROM maven:3.6.3 as maven +WORKDIR /officefloor +COPY src src +WORKDIR /officefloor/src +RUN mvn -B -N clean install +WORKDIR /officefloor/src/woof_benchmark_woof +RUN mvn -B clean install +WORKDIR /officefloor/src/woof_benchmark_sqlclient +RUN mvn -B clean package + +FROM openjdk:15-slim +RUN apt-get update && apt-get install -y libjna-java +WORKDIR /officefloor +COPY --from=maven /officefloor/src/woof_benchmark_sqlclient/target/woof_benchmark_sqlclient-1.0.0.jar server.jar +EXPOSE 8080 +CMD ["java", "-Xms2g", "-Xmx2g", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Dvertx.disableMetrics=true", "-Dvertx.threadChecks=false", "-Dvertx.disableContextTimings=true", "-Dvertx.disableTCCL=true", "-jar", "server.jar"] diff --git a/frameworks/Java/officefloor/officefloor-vertx.dockerfile b/frameworks/Java/officefloor/officefloor-vertx.dockerfile new file mode 100644 index 00000000000..bc75358f0b5 --- /dev/null +++ b/frameworks/Java/officefloor/officefloor-vertx.dockerfile @@ -0,0 +1,15 @@ +FROM maven:3.6.3 as maven +WORKDIR /officefloor +COPY src src +WORKDIR /officefloor/src +RUN mvn -B -N clean install +WORKDIR /officefloor/src/woof_benchmark +RUN mvn -B clean install +WORKDIR /officefloor/src/woof_benchmark_vertx +RUN mvn -B clean package + +FROM openjdk:15 +WORKDIR /officefloor +COPY --from=maven /officefloor/src/woof_benchmark_vertx/target/woof_benchmark_vertx-1.0.0.jar server.jar +EXPOSE 8080 +CMD ["java", "-server", "-Xms2g", "-Xmx2g", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-Dhttp.port=8080", "-Dhttp.server.name=O", "-Dhttp.date.header=true", "-jar", "server.jar"] diff --git a/frameworks/Java/officefloor/src/pom.xml b/frameworks/Java/officefloor/src/pom.xml index b45e0e99f61..2fe053ccf6a 100644 --- a/frameworks/Java/officefloor/src/pom.xml +++ b/frameworks/Java/officefloor/src/pom.xml @@ -14,12 +14,15 @@ woof_benchmark + woof_benchmark_woof woof_benchmark_micro woof_benchmark_thread_affinity woof_benchmark_async - woof_benchmark_raw + woof_benchmark_r2dbc + woof_benchmark_sqlclient woof_benchmark_netty woof_benchmark_undertow + woof_benchmark_vertx woof_benchmark_spring @@ -39,7 +42,7 @@ net.officefloor bom - 3.31.0 + 3.36.0 pom import @@ -53,6 +56,11 @@ woof_benchmark_micro ${project.version} + + net.officefloor.benchmarks + woof_benchmark_woof + ${project.version} + com.github.spullara.mustache.java compiler diff --git a/frameworks/Java/officefloor/src/woof_benchmark/.gitignore b/frameworks/Java/officefloor/src/woof_benchmark/.gitignore new file mode 100644 index 00000000000..89540d82355 --- /dev/null +++ b/frameworks/Java/officefloor/src/woof_benchmark/.gitignore @@ -0,0 +1 @@ +/dependency-reduced-pom.xml diff --git a/frameworks/Java/officefloor/src/woof_benchmark_async/.gitignore b/frameworks/Java/officefloor/src/woof_benchmark_async/.gitignore new file mode 100644 index 00000000000..89540d82355 --- /dev/null +++ 
b/frameworks/Java/officefloor/src/woof_benchmark_async/.gitignore @@ -0,0 +1 @@ +/dependency-reduced-pom.xml diff --git a/frameworks/Java/officefloor/src/woof_benchmark_async/pom.xml b/frameworks/Java/officefloor/src/woof_benchmark_async/pom.xml index 4ad826d1a7a..7bceeaad774 100644 --- a/frameworks/Java/officefloor/src/woof_benchmark_async/pom.xml +++ b/frameworks/Java/officefloor/src/woof_benchmark_async/pom.xml @@ -15,15 +15,11 @@ net.officefloor.persistence - officer2dbc + officevertx_sqlclient - io.r2dbc - r2dbc-postgresql - - - io.r2dbc - r2dbc-pool + io.vertx + vertx-pg-client com.github.spullara.mustache.java diff --git a/frameworks/Java/officefloor/src/woof_benchmark_async/src/main/java/net/officefloor/benchmark/Logic.java b/frameworks/Java/officefloor/src/woof_benchmark_async/src/main/java/net/officefloor/benchmark/Logic.java index b0f860f1a13..f753a273022 100644 --- a/frameworks/Java/officefloor/src/woof_benchmark_async/src/main/java/net/officefloor/benchmark/Logic.java +++ b/frameworks/Java/officefloor/src/woof_benchmark_async/src/main/java/net/officefloor/benchmark/Logic.java @@ -3,10 +3,10 @@ import java.io.IOException; import java.io.Writer; import java.sql.SQLException; +import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.ThreadLocalRandom; -import java.util.function.Consumer; import org.apache.commons.text.StringEscapeUtils; @@ -14,46 +14,27 @@ import com.github.mustachejava.Mustache; import com.github.mustachejava.MustacheFactory; -import io.r2dbc.spi.Batch; -import io.r2dbc.spi.R2dbcTransientResourceException; +import io.vertx.core.CompositeFuture; +import io.vertx.core.Future; +import io.vertx.sqlclient.Pool; +import io.vertx.sqlclient.Row; +import io.vertx.sqlclient.RowIterator; +import io.vertx.sqlclient.RowSet; +import io.vertx.sqlclient.Tuple; import lombok.AllArgsConstructor; import lombok.Data; -import net.officefloor.frame.api.escalate.AsynchronousFlowTimedOutEscalation; import net.officefloor.frame.api.function.AsynchronousFlow; -import net.officefloor.frame.api.function.FlowCallback; -import net.officefloor.plugin.clazz.FlowInterface; -import net.officefloor.r2dbc.R2dbcSource; -import net.officefloor.server.http.HttpException; import net.officefloor.server.http.HttpHeaderValue; import net.officefloor.server.http.HttpResponse; -import net.officefloor.server.http.HttpStatus; import net.officefloor.server.http.ServerHttpConnection; import net.officefloor.web.HttpQueryParameter; import net.officefloor.web.ObjectResponse; -import reactor.core.publisher.Flux; -import reactor.core.publisher.Mono; /** * Logic. */ public class Logic { - static { - // Increase the buffer size - System.setProperty("reactor.bufferSize.small", String.valueOf(512)); - } - - private static final FlowCallback callback = (error) -> { - if (error == null) { - return; - } else if (error instanceof AsynchronousFlowTimedOutEscalation) { - throw new HttpException(HttpStatus.SERVICE_UNAVAILABLE); - } else { - System.out.println("ERROR: " + error.getClass().getName()); - throw error; - } - }; - /** * {@link Mustache} for /fortunes. 
*/ @@ -97,98 +78,86 @@ public void json(ObjectResponse response) { @Data @AllArgsConstructor public class World { - private int id; - private int randomNumber; } - @FlowInterface - public static interface DbFlows { - void dbService(FlowCallback callback); - } - - public void db(DbFlows flows) { - flows.dbService(callback); - } - - public void dbService(AsynchronousFlow async, R2dbcSource source, ObjectResponse response) - throws SQLException { - source.getConnection().flatMap( - connection -> Mono.from(connection.createStatement("SELECT ID, RANDOMNUMBER FROM WORLD WHERE ID = $1") - .bind(0, ThreadLocalRandom.current().nextInt(1, 10001)).execute())) - .flatMap(result -> Mono.from(result.map((row, metadata) -> { - Integer id = row.get(0, Integer.class); - Integer number = row.get(1, Integer.class); - return new World(id, number); - }))).subscribe(world -> async.complete(() -> { - response.send(world); - }), handleError(async)); + public void db(AsynchronousFlow async, Pool pool, ObjectResponse response) throws SQLException { + Future future = pool.withConnection((connection) -> { + return connection.preparedQuery("SELECT ID, RANDOMNUMBER FROM WORLD WHERE ID=$1") + .execute(Tuple.of(ThreadLocalRandom.current().nextInt(1, 10001))).map((rowSet) -> { + RowIterator rows = rowSet.iterator(); + if (!rows.hasNext()) { + return null; + } + Row row = rows.next(); + return new World(row.getInteger(0), row.getInteger(1)); + }); + }); + complete(async, future, (world) -> response.send(world)); } // ========== QUERIES ================== - @FlowInterface - public static interface QueriesFlows { - void queriesService(FlowCallback callback); - } - - public void queries(QueriesFlows flows) { - flows.queriesService(callback); - } - - public void queriesService(@HttpQueryParameter("queries") String queries, AsynchronousFlow async, - R2dbcSource source, ObjectResponse> response) { + public void queries(@HttpQueryParameter("queries") String queries, AsynchronousFlow async, Pool pool, + ObjectResponse> response) { int queryCount = getQueryCount(queries); - source.getConnection().flatMap(connection -> { - return Flux.range(1, queryCount) - .flatMap(index -> connection.createStatement("SELECT ID, RANDOMNUMBER FROM WORLD WHERE ID = $1") - .bind(0, ThreadLocalRandom.current().nextInt(1, 10001)).execute()) - .flatMap(result -> Flux.from(result.map((row, metadata) -> { - Integer id = row.get(0, Integer.class); - Integer number = row.get(1, Integer.class); - return new World(id, number); - }))).collectList(); - }).subscribe(worlds -> async.complete(() -> { - response.send(worlds); - }), handleError(async)); + Future future = pool.withConnection((connection) -> { + @SuppressWarnings("rawtypes") + List futures = new ArrayList<>(queryCount); + for (int i = 0; i < queryCount; i++) { + futures.add(connection.preparedQuery("SELECT ID, RANDOMNUMBER FROM WORLD WHERE ID=$1") + .execute(Tuple.of(ThreadLocalRandom.current().nextInt(1, 10001))).map((rowSet) -> { + RowIterator rows = rowSet.iterator(); + if (!rows.hasNext()) { + return null; + } + Row row = rows.next(); + return new World(row.getInteger(0), row.getInteger(1)); + })); + } + return CompositeFuture.all(futures); + }); + complete(async, future, (worlds) -> response.send(worlds.list())); } // =========== UPDATES =================== - @FlowInterface - public static interface UpdateFlows { - void updateService(FlowCallback callback); - } - - public void update(UpdateFlows flows) { - flows.updateService(callback); - } - - public void 
updateService(@HttpQueryParameter("queries") String queries, AsynchronousFlow async, R2dbcSource source, + public void update(@HttpQueryParameter("queries") String queries, AsynchronousFlow async, Pool pool, ObjectResponse> response) { int queryCount = getQueryCount(queries); - source.getConnection().flatMap(connection -> { - return Flux.range(1, queryCount) - .flatMap(index -> connection.createStatement("SELECT ID, RANDOMNUMBER FROM WORLD WHERE ID = $1") - .bind(0, ThreadLocalRandom.current().nextInt(1, 10001)).execute()) - .flatMap(result -> Flux.from(result.map((row, metadata) -> { - Integer id = row.get(0, Integer.class); - Integer number = row.get(1, Integer.class); - return new World(id, number); - }))).collectList().flatMap(worlds -> { - Collections.sort(worlds, (a, b) -> a.id - b.id); - Batch batch = connection.createBatch(); - for (World world : worlds) { - world.randomNumber = ThreadLocalRandom.current().nextInt(1, 10001); - batch.add("UPDATE WORLD SET RANDOMNUMBER = " + world.randomNumber + " WHERE ID = " - + world.id); - } - return Mono.from(batch.execute()).map((result) -> worlds); - }); - }).subscribe(worlds -> async.complete(() -> { - response.send(worlds); - }), handleError(async)); + Future> future = pool.withConnection((connection) -> { + @SuppressWarnings("rawtypes") + List futures = new ArrayList<>(queryCount); + + // Run queries to get the worlds + for (int i = 0; i < queryCount; i++) { + futures.add(connection.preparedQuery("SELECT ID, RANDOMNUMBER FROM WORLD WHERE ID=$1") + .execute(Tuple.of(ThreadLocalRandom.current().nextInt(1, 10001))).map((rowSet) -> { + RowIterator rows = rowSet.iterator(); + if (!rows.hasNext()) { + return null; + } + Row row = rows.next(); + return new World(row.getInteger(0), ThreadLocalRandom.current().nextInt(1, 10001)); + })); + } + return CompositeFuture.all(futures).flatMap((compositeFuture) -> { + List worlds = compositeFuture.list(); + + // Sort worlds to avoid deadlocks on updates + Collections.sort(worlds, (a, b) -> a.id - b.id); + + // All worlds obtained, so run update + List batch = new ArrayList<>(queryCount); + for (World update : worlds) { + batch.add(Tuple.of(update.randomNumber, update.id)); + } + return connection.preparedQuery("UPDATE world SET randomnumber=$1 WHERE id=$2").executeBatch(batch) + .map((updates) -> worlds); + }); + }); + complete(async, future, (worlds) -> response.send(worlds)); } // =========== FORTUNES ================== @@ -204,25 +173,19 @@ public class Fortune { private String message; } - @FlowInterface - public static interface FortunesFlows { - void fortunesService(FlowCallback callback); - } - - public void fortunes(FortunesFlows flows) { - flows.fortunesService(callback); - } - - public void fortunesService(AsynchronousFlow async, R2dbcSource source, ServerHttpConnection httpConnection) - throws IOException, SQLException { - source.getConnection().flatMap(connection -> { - return Flux.from(connection.createStatement("SELECT ID, MESSAGE FROM FORTUNE").execute()) - .flatMap(result -> Flux.from(result.map((row, metadata) -> { - Integer id = row.get(0, Integer.class); - String message = row.get(1, String.class); - return new Fortune(id, message); - }))).collectList(); - }).subscribe(fortunes -> async.complete(() -> { + public void fortunes(AsynchronousFlow async, Pool pool, ServerHttpConnection httpConnection) { + Future> future = pool.withConnection((connection) -> { + return connection.preparedQuery("SELECT ID, MESSAGE FROM FORTUNE").execute(); + }); + complete(async, future, (rowSet) -> { + + // 
Obtain the fortunes + List fortunes = new ArrayList<>(16); + RowIterator rows = rowSet.iterator(); + while (rows.hasNext()) { + Row row = rows.next(); + fortunes.add(new Fortune(row.getInteger(0), row.getString(1))); + } // Additional fortunes fortunes.add(new Fortune(0, "Additional fortune added at request time.")); @@ -232,8 +195,7 @@ public void fortunesService(AsynchronousFlow async, R2dbcSource source, ServerHt HttpResponse response = httpConnection.getResponse(); response.setContentType(TEXT_HTML, null); this.fortuneMustache.execute(response.getEntityWriter(), fortunes); - - }), handleError(async)); + }); } // =========== helper =================== @@ -247,12 +209,20 @@ private static int getQueryCount(String queries) { } } - private static Consumer handleError(AsynchronousFlow async) { - return (error) -> async.complete(() -> { - try { - throw error; - } catch (R2dbcTransientResourceException | AsynchronousFlowTimedOutEscalation overloadEx) { - throw new HttpException(HttpStatus.SERVICE_UNAVAILABLE); + @FunctionalInterface + private static interface Completion { + void complete(T result) throws Exception; + } + + private static void complete(AsynchronousFlow async, Future future, Completion writeResponse) { + future.onComplete(result -> { + if (result.failed()) { + async.complete(() -> { + result.cause().printStackTrace(); + throw result.cause(); + }); + } else { + async.complete(() -> writeResponse.complete(result.result())); } }); } diff --git a/frameworks/Java/officefloor/src/woof_benchmark_async/src/main/java/net/officefloor/benchmark/SetupVertxSqlClient.java b/frameworks/Java/officefloor/src/woof_benchmark_async/src/main/java/net/officefloor/benchmark/SetupVertxSqlClient.java new file mode 100644 index 00000000000..0f64203a17d --- /dev/null +++ b/frameworks/Java/officefloor/src/woof_benchmark_async/src/main/java/net/officefloor/benchmark/SetupVertxSqlClient.java @@ -0,0 +1,36 @@ +package net.officefloor.benchmark; + +import io.vertx.sqlclient.PoolOptions; +import net.officefloor.frame.api.source.ServiceContext; +import net.officefloor.vertx.sqlclient.VertxSqlPoolConfigurer; +import net.officefloor.vertx.sqlclient.VertxSqlPoolConfigurerContext; +import net.officefloor.vertx.sqlclient.VertxSqlPoolConfigurerServiceFactory; + +/** + * Sets up the {@link PoolOptions}. 
+ * + * @author Daniel Sagenschneider + */ +public class SetupVertxSqlClient implements VertxSqlPoolConfigurer, VertxSqlPoolConfigurerServiceFactory { + + /* + * ================ VertxSqlPoolConfigurerServiceFactory ================= + */ + + @Override + public VertxSqlPoolConfigurer createService(ServiceContext context) throws Throwable { + return this; + } + + /* + * ======================= VertxSqlPoolConfigurer ========================= + */ + + @Override + public void configure(VertxSqlPoolConfigurerContext context) throws Exception { + final int MAX_POOL_SIZE = 512; + System.out.println("Setting max pool size to " + MAX_POOL_SIZE); + context.getPoolOptions().setMaxSize(MAX_POOL_SIZE); + } + +} \ No newline at end of file diff --git a/frameworks/Java/officefloor/src/woof_benchmark_async/src/main/resources/META-INF/services/net.officefloor.vertx.sqlclient.VertxSqlPoolConfigurerServiceFactory b/frameworks/Java/officefloor/src/woof_benchmark_async/src/main/resources/META-INF/services/net.officefloor.vertx.sqlclient.VertxSqlPoolConfigurerServiceFactory new file mode 100644 index 00000000000..24a2dc59f37 --- /dev/null +++ b/frameworks/Java/officefloor/src/woof_benchmark_async/src/main/resources/META-INF/services/net.officefloor.vertx.sqlclient.VertxSqlPoolConfigurerServiceFactory @@ -0,0 +1 @@ +net.officefloor.benchmark.SetupVertxSqlClient \ No newline at end of file diff --git a/frameworks/Java/officefloor/src/woof_benchmark_async/src/main/resources/application.objects b/frameworks/Java/officefloor/src/woof_benchmark_async/src/main/resources/application.objects index 9da3d44aef7..3bdb00c5c5a 100644 --- a/frameworks/Java/officefloor/src/woof_benchmark_async/src/main/resources/application.objects +++ b/frameworks/Java/officefloor/src/woof_benchmark_async/src/main/resources/application.objects @@ -1,6 +1,6 @@ - + diff --git a/frameworks/Java/officefloor/src/woof_benchmark_async/src/main/resources/application.woof b/frameworks/Java/officefloor/src/woof_benchmark_async/src/main/resources/application.woof index 60ad940da1a..e2461a9ccad 100644 --- a/frameworks/Java/officefloor/src/woof_benchmark_async/src/main/resources/application.woof +++ b/frameworks/Java/officefloor/src/woof_benchmark_async/src/main/resources/application.woof @@ -1,22 +1,22 @@ -
[application.woof hunk: the XML elements on these +/- lines were stripped during text extraction, so the individual changes to the WoOF configuration cannot be reproduced here.]
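Since the async variant now routes all database work through the Vert.x SQL Client `Pool` (see the reworked `Logic.java` above), here is a minimal, self-contained sketch of that access pattern for orientation. It is illustrative only: the class name `VertxDbSketch` is invented, and in the benchmark the pool is not built by hand but supplied by OfficeFloor's `officevertx_sqlclient` module, configured through `SetupVertxSqlClient` and the `datasource.properties` shown below.

```java
import io.vertx.pgclient.PgConnectOptions;
import io.vertx.pgclient.PgPool;
import io.vertx.sqlclient.PoolOptions;
import io.vertx.sqlclient.Row;
import io.vertx.sqlclient.RowIterator;
import io.vertx.sqlclient.Tuple;

import java.util.concurrent.ThreadLocalRandom;

public class VertxDbSketch {

    public static void main(String[] args) {
        // Connection settings taken from the datasource.properties below; in the
        // benchmark the pool is injected by OfficeFloor rather than built by hand.
        PgConnectOptions connect = new PgConnectOptions()
                .setHost("tfb-database")
                .setPort(5432)
                .setDatabase("hello_world")
                .setUser("benchmarkdbuser")
                .setPassword("benchmarkdbpass");
        PoolOptions poolOptions = new PoolOptions().setMaxSize(512); // mirrors SetupVertxSqlClient

        PgPool pool = PgPool.pool(connect, poolOptions);

        // Same pattern as Logic.db(): borrow a pooled connection, run a prepared
        // query for a random World id, and map the first row of the result.
        Row row = pool.withConnection(conn -> conn
                        .preparedQuery("SELECT ID, RANDOMNUMBER FROM WORLD WHERE ID=$1")
                        .execute(Tuple.of(ThreadLocalRandom.current().nextInt(1, 10001)))
                        .map(rowSet -> {
                            RowIterator<Row> rows = rowSet.iterator();
                            return rows.hasNext() ? rows.next() : null;
                        }))
                .toCompletionStage().toCompletableFuture().join(); // block only for this demo

        System.out.println(row == null ? "no row" : row.getInteger(0) + " -> " + row.getInteger(1));
        pool.close();
    }
}
```

The queries, update, and fortunes handlers in `Logic.java` follow the same shape, composing several of these per-query futures with `CompositeFuture.all` before writing the response.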
diff --git a/frameworks/Java/officefloor/src/woof_benchmark_async/src/main/resources/datasource.properties b/frameworks/Java/officefloor/src/woof_benchmark_async/src/main/resources/datasource.properties index 11d4c2c5615..3025d2e6198 100644 --- a/frameworks/Java/officefloor/src/woof_benchmark_async/src/main/resources/datasource.properties +++ b/frameworks/Java/officefloor/src/woof_benchmark_async/src/main/resources/datasource.properties @@ -1,8 +1,5 @@ -driver=pool -protocol=postgresql host=tfb-database port=5432 database=hello_world -user=benchmarkdbuser -password=benchmarkdbpass -maxSize=512 \ No newline at end of file +username=benchmarkdbuser +password=benchmarkdbpass \ No newline at end of file diff --git a/frameworks/Java/officefloor/src/woof_benchmark_micro/.gitignore b/frameworks/Java/officefloor/src/woof_benchmark_micro/.gitignore new file mode 100644 index 00000000000..89540d82355 --- /dev/null +++ b/frameworks/Java/officefloor/src/woof_benchmark_micro/.gitignore @@ -0,0 +1 @@ +/dependency-reduced-pom.xml diff --git a/frameworks/Java/officefloor/src/woof_benchmark_netty/.gitignore b/frameworks/Java/officefloor/src/woof_benchmark_netty/.gitignore new file mode 100644 index 00000000000..89540d82355 --- /dev/null +++ b/frameworks/Java/officefloor/src/woof_benchmark_netty/.gitignore @@ -0,0 +1 @@ +/dependency-reduced-pom.xml diff --git a/frameworks/Java/officefloor/src/woof_benchmark_r2dbc/.gitignore b/frameworks/Java/officefloor/src/woof_benchmark_r2dbc/.gitignore new file mode 100644 index 00000000000..89540d82355 --- /dev/null +++ b/frameworks/Java/officefloor/src/woof_benchmark_r2dbc/.gitignore @@ -0,0 +1 @@ +/dependency-reduced-pom.xml diff --git a/frameworks/Java/officefloor/src/woof_benchmark_raw/pom.xml b/frameworks/Java/officefloor/src/woof_benchmark_r2dbc/pom.xml similarity index 60% rename from frameworks/Java/officefloor/src/woof_benchmark_raw/pom.xml rename to frameworks/Java/officefloor/src/woof_benchmark_r2dbc/pom.xml index 200c18ed680..66233b0787a 100644 --- a/frameworks/Java/officefloor/src/woof_benchmark_raw/pom.xml +++ b/frameworks/Java/officefloor/src/woof_benchmark_r2dbc/pom.xml @@ -7,19 +7,11 @@ benchmarks 1.0.0 - woof_benchmark_raw + woof_benchmark_r2dbc - net.officefloor.web - woof - - - com.fasterxml.jackson.core - jackson-databind - - - com.fasterxml.jackson.module - jackson-module-afterburner + net.officefloor.benchmarks + woof_benchmark_woof io.r2dbc @@ -29,21 +21,6 @@ io.r2dbc r2dbc-pool - - net.officefloor.web - officeweb_executive - - - net.officefloor.web - officeweb - - - - - org.projectlombok - lombok - provided - @@ -59,7 +36,7 @@ - net.officefloor.benchmark.RawOfficeFloorMain + net.officefloor.benchmark.R2dbcOfficeFloorMain diff --git a/frameworks/Java/officefloor/src/woof_benchmark_r2dbc/src/main/java/net/officefloor/benchmark/R2dbcOfficeFloorMain.java b/frameworks/Java/officefloor/src/woof_benchmark_r2dbc/src/main/java/net/officefloor/benchmark/R2dbcOfficeFloorMain.java new file mode 100644 index 00000000000..002a54fbbc0 --- /dev/null +++ b/frameworks/Java/officefloor/src/woof_benchmark_r2dbc/src/main/java/net/officefloor/benchmark/R2dbcOfficeFloorMain.java @@ -0,0 +1,300 @@ +package net.officefloor.benchmark; + +import java.util.Collections; +import java.util.concurrent.Executor; +import java.util.concurrent.ThreadLocalRandom; + +import io.netty.channel.unix.Socket; +import io.r2dbc.pool.PoolingConnectionFactoryProvider; +import io.r2dbc.spi.Batch; +import io.r2dbc.spi.Connection; +import io.r2dbc.spi.ConnectionFactories; +import 
io.r2dbc.spi.ConnectionFactory; +import io.r2dbc.spi.ConnectionFactoryOptions; +import net.officefloor.server.RequestHandler; +import net.officefloor.server.http.HttpResponse; +import net.officefloor.server.http.ServerHttpConnection; +import net.officefloor.server.http.parse.HttpRequestParser; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.core.scheduler.Scheduler; +import reactor.core.scheduler.Schedulers; + +/** + * R2DBC server. + * + * @author Daniel Sagenschneider + */ +public class R2dbcOfficeFloorMain implements DatabaseOperations { + + /** + * Database query load capacity to handle validation load. + */ + private static final int QUERY_LOAD_CAPACITY = 512 * (20 + 1); // update 20 selects then batch + + /** + * Buffer size of queries. + */ + private static final int QUERY_BUFFER_SIZE = 512; + + /** + * Run application. + */ + public static void main(String[] args) throws Exception { + + // Increase the buffer size (note: too high and cause OOM issues) + System.setProperty("reactor.bufferSize.small", String.valueOf(QUERY_BUFFER_SIZE)); + + // Run the WoOF server + RawWoof.run(args, (socketCount, server, port, database, username, + password) -> new R2dbcOfficeFloorMain(socketCount, server, port, database, username, password)); + } + + /** + * {@link ThreadLocal} {@link RateLimit}. + */ + private final ThreadLocal threadLocalRateLimit = new ThreadLocal(); + + /** + * {@link ThreadLocal} {@link Connection} instances. + */ + private final ThreadLocal threadLocalConnections; + + /** + * Instantiate. + * + * @param socketCount Number of server {@link Socket} instances. + * @param server Name of database server. + * @param port Port of database. + * @param database Name of database within server. + * @param username Username. + * @param password Password. 
+ */ + public R2dbcOfficeFloorMain(int socketCount, String server, int port, String database, String username, + String password) { + + // Must have enough connection capacity for initial load (+1 for rounding) + int requiredConnectionsPerSocket = (QUERY_LOAD_CAPACITY / (socketCount * QUERY_BUFFER_SIZE)) + 1; + int connectionsPerSocket = Math.max(4, requiredConnectionsPerSocket); + System.out.println("Using " + connectionsPerSocket + " connections per socket"); + + // Determine the pool size for connections + int connectionPoolSize = socketCount * connectionsPerSocket; + + // Build the connection pool + ConnectionFactoryOptions factoryOptions = ConnectionFactoryOptions.builder() + .option(ConnectionFactoryOptions.DRIVER, "pool").option(ConnectionFactoryOptions.PROTOCOL, "postgresql") + .option(ConnectionFactoryOptions.HOST, server).option(ConnectionFactoryOptions.PORT, port) + .option(ConnectionFactoryOptions.DATABASE, database).option(ConnectionFactoryOptions.USER, username) + .option(ConnectionFactoryOptions.PASSWORD, password) + .option(PoolingConnectionFactoryProvider.MAX_SIZE, connectionPoolSize).build(); + ConnectionFactory connectionFactory = ConnectionFactories.get(factoryOptions); + + // Create thread local connection + this.threadLocalConnections = new ThreadLocal() { + @Override + protected Connection[] initialValue() { + Connection[] connections = new Connection[connectionsPerSocket]; + for (int i = 0; i < connections.length; i++) { + connections[i] = Mono.from(connectionFactory.create()).block(); + } + return connections; + } + }; + } + + /* + * ===================== DatabaseOperations ====================== + */ + + @Override + public void threadSetup(RequestHandler requestHandler) { + + // Ensure rate limits for socket servicing thread + // Note: will always create before servicing any requests + if (this.threadLocalRateLimit.get() == null) { + Connection[] connections = this.threadLocalConnections.get(); + RateLimit rateLimit = new RateLimit(requestHandler, connections); + this.threadLocalRateLimit.set(rateLimit); + } + } + + @Override + public void db(HttpResponse response, ServerHttpConnection connection, DatabaseOperationsContext context) { + + // Determine if will overload queries + RateLimitedConnection conn = this.threadLocalRateLimit.get().getAvailableConnection(1); + if (conn == null) { + context.sendError(connection, context.getTransientResourceException()); + return; // rate limited + } + + // Service + Mono.from(conn.connection.createStatement("SELECT ID, RANDOMNUMBER FROM WORLD WHERE ID = $1") + .bind(0, ThreadLocalRandom.current().nextInt(1, 10001)).execute()) + .flatMap(result -> Mono.from(result.map((row, metadata) -> { + Integer id = row.get(0, Integer.class); + Integer number = row.get(1, Integer.class); + return new World(id, number); + }))).publishOn(conn.writeScheduler).subscribe(world -> { + context.dbSend(response, connection, world); + }, error -> { + context.sendError(connection, error); + }, () -> { + conn.processed(1); + }); + } + + @Override + public void queries(int queryCount, HttpResponse response, ServerHttpConnection connection, + DatabaseOperationsContext context) { + + // Determine if will overload queries + RateLimitedConnection conn = this.threadLocalRateLimit.get().getAvailableConnection(queryCount); + if (conn == null) { + context.sendError(connection, context.getTransientResourceException()); + return; // rate limited + } + + // Service + Flux.range(1, queryCount) + .flatMap(index -> conn.connection.createStatement("SELECT ID, 
RANDOMNUMBER FROM WORLD WHERE ID = $1") + .bind(0, ThreadLocalRandom.current().nextInt(1, 10001)).execute()) + .flatMap(result -> Flux.from(result.map((row, metadata) -> { + Integer id = row.get(0, Integer.class); + Integer number = row.get(1, Integer.class); + return new World(id, number); + }))).collectList().publishOn(conn.writeScheduler).subscribe(worlds -> { + context.queriesSend(response, connection, worlds); + }, error -> { + context.sendError(connection, error); + }, () -> { + conn.processed(queryCount); + }); + } + + @Override + public void fortunes(HttpResponse response, ServerHttpConnection connection, DatabaseOperationsContext context) { + + // Determine if will overload queries + RateLimitedConnection conn = this.threadLocalRateLimit.get().getAvailableConnection(1); + if (conn == null) { + context.sendError(connection, context.getTransientResourceException()); + return; // rate limited + } + + // Service + Flux.from(conn.connection.createStatement("SELECT ID, MESSAGE FROM FORTUNE").execute()) + .flatMap(result -> Flux.from(result.map((row, metadata) -> { + Integer id = row.get(0, Integer.class); + String message = row.get(1, String.class); + return new Fortune(id, message); + }))).collectList().publishOn(conn.writeScheduler).subscribe(fortunes -> { + context.fortunesSend(response, connection, fortunes); + }, error -> { + context.sendError(connection, error); + }, () -> { + conn.processed(1); + }); + } + + @Override + public void update(int queryCount, HttpResponse response, ServerHttpConnection connection, + DatabaseOperationsContext context) { + + int executeQueryCount = queryCount + 1; // select all and update + + // Determine if will overload queries + RateLimitedConnection conn = this.threadLocalRateLimit.get().getAvailableConnection(executeQueryCount); + if (conn == null) { + context.sendError(connection, context.getTransientResourceException()); + return; // rate limited + } + + // Service + Flux.range(1, queryCount) + .flatMap(index -> conn.connection.createStatement("SELECT ID, RANDOMNUMBER FROM WORLD WHERE ID = $1") + .bind(0, ThreadLocalRandom.current().nextInt(1, 10001)).execute()) + .flatMap(result -> Flux.from(result.map((row, metadata) -> { + Integer id = row.get(0, Integer.class); + Integer number = row.get(1, Integer.class); + return new World(id, number); + }))).collectList().flatMap(worlds -> { + Collections.sort(worlds, (a, b) -> a.id - b.id); + Batch batch = conn.connection.createBatch(); + for (World world : worlds) { + world.randomNumber = ThreadLocalRandom.current().nextInt(1, 10001); + batch.add("UPDATE WORLD SET RANDOMNUMBER = " + world.randomNumber + " WHERE ID = " + world.id); + } + return Mono.from(batch.execute()).map((result) -> worlds); + }).publishOn(conn.writeScheduler).subscribe(worlds -> { + context.updateSend(response, connection, worlds); + }, error -> { + context.sendError(connection, error); + }, () -> { + conn.processed(executeQueryCount); + }); + } + + private static class RateLimit { + + private final RateLimitedConnection[] rateLimitedConnections; + + private final Executor socketExecutor; + + private RateLimit(RequestHandler requestHandler, Connection[] connections) { + + // Create the write scheduler + this.socketExecutor = (runnable) -> requestHandler.execute(() -> { + runnable.run(); + }); + Scheduler writeScheduler = Schedulers.fromExecutor(this.socketExecutor); + + // Create the rate limited connections + this.rateLimitedConnections = new RateLimitedConnection[connections.length]; + for (int i = 0; i < 
this.rateLimitedConnections.length; i++) { + this.rateLimitedConnections[i] = new RateLimitedConnection(connections[i], writeScheduler); + } + } + + private RateLimitedConnection getAvailableConnection(int queryCount) { + + // Determine available connection for limit + for (int i = 0; i < this.rateLimitedConnections.length; i++) { + RateLimitedConnection connection = this.rateLimitedConnections[i]; + + // Determine if query count reached + int newCount = connection.activeQueries + queryCount; + if (newCount <= QUERY_BUFFER_SIZE) { + // Connection available for load + connection.activeQueries = newCount; + return connection; + } + } + + // As here, no available connection + return null; + } + } + + private static class RateLimitedConnection { + + private final Scheduler writeScheduler; + + private final Connection connection; + + private int activeQueries; + + private RateLimitedConnection(Connection connection, Scheduler writeScheduler) { + this.connection = connection; + this.writeScheduler = writeScheduler; + } + + private void processed(int queryCount) { + + // Update the active queries + this.activeQueries -= queryCount; + } + } + +} \ No newline at end of file diff --git a/frameworks/Java/officefloor/src/woof_benchmark_raw/src/main/resources/fortunes.mustache b/frameworks/Java/officefloor/src/woof_benchmark_r2dbc/src/main/resources/fortunes.mustache similarity index 100% rename from frameworks/Java/officefloor/src/woof_benchmark_raw/src/main/resources/fortunes.mustache rename to frameworks/Java/officefloor/src/woof_benchmark_r2dbc/src/main/resources/fortunes.mustache diff --git a/frameworks/Java/officefloor/src/woof_benchmark_raw/src/main/java/net/officefloor/benchmark/RawOfficeFloorMain.java b/frameworks/Java/officefloor/src/woof_benchmark_raw/src/main/java/net/officefloor/benchmark/RawOfficeFloorMain.java deleted file mode 100644 index 7dadac67615..00000000000 --- a/frameworks/Java/officefloor/src/woof_benchmark_raw/src/main/java/net/officefloor/benchmark/RawOfficeFloorMain.java +++ /dev/null @@ -1,734 +0,0 @@ -/* - * OfficeFloor - http://www.officefloor.net - * Copyright (C) 2005-2018 Daniel Sagenschneider - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
- */ -package net.officefloor.benchmark; - -import java.io.IOException; -import java.io.PrintWriter; -import java.io.Writer; -import java.nio.ByteBuffer; -import java.nio.channels.CancelledKeyException; -import java.nio.channels.ClosedChannelException; -import java.time.ZoneOffset; -import java.time.ZonedDateTime; -import java.time.format.DateTimeFormatter; -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; -import java.util.concurrent.Executor; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.ThreadFactory; -import java.util.concurrent.ThreadLocalRandom; -import java.util.concurrent.TimeUnit; -import java.util.logging.Logger; - -import org.apache.commons.text.StringEscapeUtils; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.module.afterburner.AfterburnerModule; - -import io.r2dbc.pool.PoolingConnectionFactoryProvider; -import io.r2dbc.spi.Batch; -import io.r2dbc.spi.Connection; -import io.r2dbc.spi.ConnectionFactories; -import io.r2dbc.spi.ConnectionFactory; -import io.r2dbc.spi.ConnectionFactoryOptions; -import io.r2dbc.spi.R2dbcTransientResourceException; -import lombok.AllArgsConstructor; -import lombok.Data; -import net.officefloor.benchmark.RawOfficeFloorMain.Fortune; -import net.officefloor.benchmark.RawOfficeFloorMain.Message; -import net.officefloor.benchmark.RawOfficeFloorMain.World; -import net.officefloor.frame.api.manage.OfficeFloor; -import net.officefloor.frame.api.manage.ProcessManager; -import net.officefloor.frame.api.managedobject.ManagedObjectContext; -import net.officefloor.frame.api.managedobject.ProcessSafeOperation; -import net.officefloor.frame.api.managedobject.pool.ThreadCompletionListener; -import net.officefloor.server.RequestHandler; -import net.officefloor.server.SocketManager; -import net.officefloor.server.SocketServicer; -import net.officefloor.server.http.AbstractHttpServicerFactory; -import net.officefloor.server.http.HttpHeaderName; -import net.officefloor.server.http.HttpHeaderValue; -import net.officefloor.server.http.HttpRequest; -import net.officefloor.server.http.HttpResponse; -import net.officefloor.server.http.HttpResponseHeaders; -import net.officefloor.server.http.HttpServerLocation; -import net.officefloor.server.http.HttpServerSocketManagedObjectSource; -import net.officefloor.server.http.HttpStatus; -import net.officefloor.server.http.ServerHttpConnection; -import net.officefloor.server.http.impl.HttpServerLocationImpl; -import net.officefloor.server.http.impl.ProcessAwareServerHttpConnectionManagedObject; -import net.officefloor.server.http.parse.HttpRequestParser; -import net.officefloor.server.http.parse.HttpRequestParser.HttpRequestParserMetaData; -import net.officefloor.server.stream.ServerWriter; -import net.officefloor.server.stream.impl.ThreadLocalStreamBufferPool; -import net.officefloor.web.executive.CpuCore; -import net.officefloor.web.executive.CpuCore.LogicalCpu; -import net.openhft.affinity.Affinity; -import reactor.core.publisher.Flux; -import reactor.core.publisher.Mono; -import reactor.core.scheduler.Scheduler; -import reactor.core.scheduler.Schedulers; - -import net.officefloor.web.executive.CpuCore; -import net.openhft.affinity.Affinity; - -/** - *

<p> - * {@link SocketManager} raw performance. - * <p>

- * Allows determining the overhead of the {@link OfficeFloor} framework. - */ -public class RawOfficeFloorMain { - - /** - * Database query load capacity to handle validation load. - */ - private static final int QUERY_LOAD_CAPACITY = 512 * (20 + 1); // update 20 selects then batch - - /** - * Buffer size of queries. - */ - private static final int QUERY_BUFFER_SIZE = 512; - - /** - * {@link SocketManager}. - */ - public static SocketManager socketManager = null; - - /** - * {@link Logger}. - */ - private static Logger logger = Logger.getLogger(RawOfficeFloorMain.class.getName()); - - /** - * Run application. - */ - public static void main(String[] args) throws Exception { - - // Obtain the port from properties - int port = args.length > 0 ? Integer.parseInt(args[0]) : 8080; - - // Ensure previous socket manager shutdown (typically from tests) - if (socketManager != null) { - socketManager.shutdown(); - } - - // Indicate details - String server = System.getProperty("OFFICE.net_officefloor_jdbc_DataSourceManagedObjectSource.server", - "tfb-database"); - System.out.println("Starting server on port " + port + " talking to database " + server); - - // Increase the buffer size (note: too high and cause OOM issues) - System.setProperty("reactor.bufferSize.small", String.valueOf(QUERY_BUFFER_SIZE)); - - // Create the server location - HttpServerLocation serverLocation = new HttpServerLocationImpl("localhost", port, -1); - - // Create a thread factory per logical CPU - ThreadCompletionListener[] threadCompletionListenerCapture = new ThreadCompletionListener[] { null }; - List threadFactories = new LinkedList<>(); - for (CpuCore cpuCore : CpuCore.getCores()) { - for (CpuCore.LogicalCpu logicalCpu : cpuCore.getCpus()) { - - // Create thread factory for logical CPU - ThreadFactory boundThreadFactory = (runnable) -> new Thread(() -> { - ThreadLocalStreamBufferPool bufferPool = (ThreadLocalStreamBufferPool) threadCompletionListenerCapture[0]; - try { - // Bind thread to logical CPU - Affinity.setAffinity(logicalCpu.getCpuAffinity()); - - // Set up for thread local buffer pooling - bufferPool.activeThreadLocalPooling(); - - // Run logic for thread - runnable.run(); - } finally { - bufferPool.threadComplete(); - } - }); - - // Add the thread factory - threadFactories.add(boundThreadFactory); - } - } - ThreadFactory[] executionStrategy = threadFactories.toArray(new ThreadFactory[0]); - System.out.println("Using " + executionStrategy.length + " executors"); - - // Create the socket manager - socketManager = HttpServerSocketManagedObjectSource.createSocketManager(executionStrategy, - (threadCompletionListener) -> threadCompletionListenerCapture[0] = threadCompletionListener); - - // Must have enough connection capacity for initial load (+1 for rounding) - int requiredConnectionsPerSocket = (QUERY_LOAD_CAPACITY / (executionStrategy.length * QUERY_BUFFER_SIZE)) + 1; - int connectionsPerSocket = Math.max(4, requiredConnectionsPerSocket); - System.out.println("Using " + connectionsPerSocket + " connections per socket"); - - // Determine the pool size for connections - int connectionPoolSize = executionStrategy.length * connectionsPerSocket; - - // Build the connection pool - ConnectionFactoryOptions factoryOptions = ConnectionFactoryOptions.builder() - .option(ConnectionFactoryOptions.DRIVER, "pool").option(ConnectionFactoryOptions.PROTOCOL, "postgresql") - .option(ConnectionFactoryOptions.HOST, server).option(ConnectionFactoryOptions.PORT, 5432) - .option(ConnectionFactoryOptions.DATABASE, "hello_world") - 
.option(ConnectionFactoryOptions.USER, "benchmarkdbuser") - .option(ConnectionFactoryOptions.PASSWORD, "benchmarkdbpass") - .option(PoolingConnectionFactoryProvider.MAX_SIZE, connectionPoolSize).build(); - ConnectionFactory connectionFactory = ConnectionFactories.get(factoryOptions); - - // Create raw HTTP servicing - RawHttpServicerFactory serviceFactory = new RawHttpServicerFactory(serverLocation, connectionFactory, - connectionsPerSocket); - socketManager.bindServerSocket(serverLocation.getClusterHttpPort(), null, null, serviceFactory, serviceFactory); - - // Setup Date - ScheduledExecutorService dateTimer = Executors.newScheduledThreadPool(1); - dateTimer.scheduleAtFixedRate(serviceFactory.updateDate, 0, 1, TimeUnit.SECONDS); - - // Start servicing - Runnable[] runnables = socketManager.getRunnables(); - for (int i = 0; i < runnables.length; i++) { - executionStrategy[i].newThread(runnables[i]).start(); - } - Thread.sleep(1000); // allow threads to start up - - // Indicate running - System.out.println("OfficeFloor raw running on port " + serverLocation.getClusterHttpPort()); - } - - /** - * Raw {@link AbstractHttpServicerFactory}. - */ - private static class RawHttpServicerFactory extends AbstractHttpServicerFactory { - - private static HttpHeaderName NAME_SERVER = new HttpHeaderName("Server"); - - private static HttpHeaderValue VALUE_SERVER = new HttpHeaderValue("O"); - - private static HttpHeaderName NAME_DATE = new HttpHeaderName("Date"); - - private static byte[] HELLO_WORLD = "Hello, World!".getBytes(ServerHttpConnection.DEFAULT_HTTP_ENTITY_CHARSET); - - private static final HttpHeaderValue APPLICATION_JSON = new HttpHeaderValue("application/json"); - - private static final HttpHeaderValue TEXT_PLAIN = new HttpHeaderValue("text/plain"); - - private static final HttpHeaderValue TEXT_HTML = new HttpHeaderValue("text/html;charset=utf-8"); - - private static final String QUERIES_PATH_PREFIX = "/queries?queries="; - - private static final String UPDATE_PATH_PREFIX = "/update?queries="; - - private static final byte[] TEMPLATE_START = "Fortunes" - .getBytes(ServerHttpConnection.DEFAULT_HTTP_ENTITY_CHARSET); - - private static final byte[] FORTUNE_START = "".getBytes(ServerHttpConnection.DEFAULT_HTTP_ENTITY_CHARSET); - - private static final byte[] TEMPLATE_END = "
idmessage
".getBytes(ServerHttpConnection.DEFAULT_HTTP_ENTITY_CHARSET); - - private static final byte[] FORTUNE_MIDDLE = "".getBytes(ServerHttpConnection.DEFAULT_HTTP_ENTITY_CHARSET); - - private static final byte[] FORTUNE_END = "
" - .getBytes(ServerHttpConnection.DEFAULT_HTTP_ENTITY_CHARSET); - - private static final R2dbcTransientResourceException THROTTLED = new R2dbcTransientResourceException(); - - /** - * Date {@link HttpHeaderValue}. - */ - private volatile HttpHeaderValue dateHttpHeader; - - private final Runnable updateDate = () -> { - String now = DateTimeFormatter.RFC_1123_DATE_TIME.format(ZonedDateTime.now(ZoneOffset.UTC)); - RawHttpServicerFactory.this.dateHttpHeader = new HttpHeaderValue(now); - }; - - /** - * {@link ObjectMapper}. - */ - private final ObjectMapper objectMapper = new ObjectMapper(); - - /** - * {@link ManagedObjectContext}. - */ - private static ManagedObjectContext managedObjectContext = new ManagedObjectContext() { - - @Override - public String getBoundName() { - return RawOfficeFloorMain.class.getSimpleName(); - } - - @Override - public Logger getLogger() { - return logger; - } - - @Override - public R run(ProcessSafeOperation operation) throws T { - return operation.run(); - } - }; - - /** - * {@link ConnectionFactory}. - */ - private final ConnectionFactory connectionFactory; - - /** - * {@link ThreadLocal} {@link Connection} instances. - */ - private final ThreadLocal threadLocalConnections; - - /** - * {@link ThreadLocal} {@link RateLimit}. - */ - private final ThreadLocal threadLocalRateLimit = new ThreadLocal(); - - /** - * Instantiate. - * - * @param serverLocation {@link HttpServerLocation}. - * @param connectionFactory {@link ConnectionFactory}. - * @param connectionsPerSocket Number of DB connections per socket. - */ - public RawHttpServicerFactory(HttpServerLocation serverLocation, ConnectionFactory connectionFactory, - int connectionsPerSocket) { - super(serverLocation, false, new HttpRequestParserMetaData(100, 1000, 1000000), null, null, true); - this.objectMapper.registerModule(new AfterburnerModule()); - this.connectionFactory = connectionFactory; - - // Create thread local connection - this.threadLocalConnections = new ThreadLocal() { - @Override - protected Connection[] initialValue() { - Connection[] connections = new Connection[connectionsPerSocket]; - for (int i = 0; i < connections.length; i++) { - connections[i] = Mono.from(RawHttpServicerFactory.this.connectionFactory.create()).block(); - } - return connections; - } - }; - } - - /** - * Sends the {@link HttpResponse}. - * - * @param connection {@link ServerHttpConnection}. - * @throws IOException If fails to send. 
- */ - protected void send(ProcessAwareServerHttpConnectionManagedObject connection) throws IOException { - try { - connection.getServiceFlowCallback().run(null); - } catch (IOException ex) { - throw ex; - } catch (Throwable ex) { - throw new IOException(ex); - } - } - - /* - * =============== SocketServicerFactory ================= - */ - - @Override - public SocketServicer createSocketServicer( - RequestHandler requestHandler) { - - // Ensure rate limits for socket servicing thread - // Note: will always create before servicing any requests - if (this.threadLocalRateLimit.get() == null) { - Connection[] connections = this.threadLocalConnections.get(); - RateLimit rateLimit = new RateLimit(requestHandler, connections); - this.threadLocalRateLimit.set(rateLimit); - } - - // Continue on to create socket servicer - return super.createSocketServicer(requestHandler); - } - - /* - * ===================== HttpServicer ==================== - */ - - @Override - protected ProcessManager service(ProcessAwareServerHttpConnectionManagedObject connection) - throws IOException { - - // Configure context - connection.setManagedObjectContext(managedObjectContext); - - // Service the connection - HttpRequest request = connection.getRequest(); - HttpResponse response = connection.getResponse(); - - // Provider Server and Date - HttpResponseHeaders headers = response.getHeaders(); - headers.addHeader(NAME_SERVER, VALUE_SERVER); - headers.addHeader(NAME_DATE, this.dateHttpHeader); - - // Determine request - String requestUri = request.getUri(); - switch (requestUri) { - - case "/plaintext": - this.plaintext(response, connection); - break; - - case "/json": - this.json(response, connection); - break; - - case "/db": - this.db(response, connection); - break; - - case "/fortunes": - this.fortunes(response, connection); - break; - - default: - // Provide redirect - if (requestUri.startsWith(QUERIES_PATH_PREFIX)) { - this.queries(requestUri, response, connection); - - } else if (requestUri.startsWith(UPDATE_PATH_PREFIX)) { - this.update(requestUri, response, connection); - - } else { - // Unknown request - response.setStatus(HttpStatus.NOT_FOUND); - this.send(connection); - } - break; - } - - // No process management - return null; - } - - private void plaintext(HttpResponse response, - ProcessAwareServerHttpConnectionManagedObject connection) throws IOException { - response.setContentType(TEXT_PLAIN, null); - response.getEntity().write(HELLO_WORLD); - this.send(connection); - } - - private void json(HttpResponse response, ProcessAwareServerHttpConnectionManagedObject connection) - throws IOException { - response.setContentType(APPLICATION_JSON, null); - this.objectMapper.writeValue(response.getEntityWriter(), new Message("Hello, World!")); - this.send(connection); - } - - private void db(HttpResponse response, ProcessAwareServerHttpConnectionManagedObject connection) { - - // Determine if will overload queries - RateLimitedConnection conn = this.threadLocalRateLimit.get().getAvailableConnection(1); - if (conn == null) { - this.sendError(connection, THROTTLED); - return; // rate limited - } - - // Service - Mono.from(conn.connection.createStatement("SELECT ID, RANDOMNUMBER FROM WORLD WHERE ID = $1") - .bind(0, ThreadLocalRandom.current().nextInt(1, 10001)).execute()) - .flatMap(result -> Mono.from(result.map((row, metadata) -> { - Integer id = row.get(0, Integer.class); - Integer number = row.get(1, Integer.class); - return new World(id, number); - }))).publishOn(conn.writeScheduler).subscribe(world -> { - try { - 
response.setContentType(APPLICATION_JSON, null); - this.objectMapper.writeValue(response.getEntityWriter(), world); - this.send(connection); - } catch (CancelledKeyException | ClosedChannelException ex) { - // Ignore as disconnecting client - } catch (IOException ex) { - ex.printStackTrace(); - } - }, error -> { - this.sendError(connection, error); - }, () -> { - conn.processed(1); - }); - } - - private void queries(String requestUri, HttpResponse response, - ProcessAwareServerHttpConnectionManagedObject connection) { - - // Obtain the number of queries - String queriesCountText = requestUri.substring(QUERIES_PATH_PREFIX.length()); - int queryCount = getQueryCount(queriesCountText); - - // Determine if will overload queries - RateLimitedConnection conn = this.threadLocalRateLimit.get().getAvailableConnection(queryCount); - if (conn == null) { - this.sendError(connection, THROTTLED); - return; // rate limited - } - - // Service - Flux.range(1, queryCount) - .flatMap( - index -> conn.connection.createStatement("SELECT ID, RANDOMNUMBER FROM WORLD WHERE ID = $1") - .bind(0, ThreadLocalRandom.current().nextInt(1, 10001)).execute()) - .flatMap(result -> Flux.from(result.map((row, metadata) -> { - Integer id = row.get(0, Integer.class); - Integer number = row.get(1, Integer.class); - return new World(id, number); - }))).collectList().publishOn(conn.writeScheduler).subscribe(worlds -> { - try { - response.setContentType(APPLICATION_JSON, null); - this.objectMapper.writeValue(response.getEntityWriter(), worlds); - this.send(connection); - } catch (CancelledKeyException | ClosedChannelException ex) { - // Ignore as disconnecting client - } catch (IOException ex) { - ex.printStackTrace(); - } - }, error -> { - this.sendError(connection, error); - }, () -> { - conn.processed(queryCount); - }); - } - - private void fortunes(HttpResponse response, - ProcessAwareServerHttpConnectionManagedObject connection) { - - // Determine if will overload queries - RateLimitedConnection conn = this.threadLocalRateLimit.get().getAvailableConnection(1); - if (conn == null) { - this.sendError(connection, THROTTLED); - return; // rate limited - } - - // Service - Flux.from(conn.connection.createStatement("SELECT ID, MESSAGE FROM FORTUNE").execute()) - .flatMap(result -> Flux.from(result.map((row, metadata) -> { - Integer id = row.get(0, Integer.class); - String message = row.get(1, String.class); - return new Fortune(id, message); - }))).collectList().publishOn(conn.writeScheduler).subscribe(fortunes -> { - try { - // Additional fortunes - fortunes.add(new Fortune(0, "Additional fortune added at request time.")); - Collections.sort(fortunes, (a, b) -> a.message.compareTo(b.message)); - - // Send response - response.setContentType(TEXT_HTML, null); - ServerWriter writer = response.getEntityWriter(); - writer.write(TEMPLATE_START); - for (Fortune fortune : fortunes) { - writer.write(FORTUNE_START); - int id = fortune.getId(); - writer.write(Integer.valueOf(id).toString()); - writer.write(FORTUNE_MIDDLE); - StringEscapeUtils.ESCAPE_HTML4.translate(fortune.getMessage(), writer); - writer.write(FORTUNE_END); - } - writer.write(TEMPLATE_END); - this.send(connection); - } catch (CancelledKeyException | ClosedChannelException ex) { - // Ignore as disconnecting client - } catch (IOException ex) { - ex.printStackTrace(); - } - }, error -> { - this.sendError(connection, error); - }, () -> { - conn.processed(1); - }); - } - - private void update(String requestUri, HttpResponse response, - 
ProcessAwareServerHttpConnectionManagedObject connection) { - - // Obtain the number of queries - String queriesCountText = requestUri.substring(UPDATE_PATH_PREFIX.length()); - int queryCount = getQueryCount(queriesCountText); - int executeQueryCount = queryCount + 1; // select all and update - - // Determine if will overload queries - RateLimitedConnection conn = this.threadLocalRateLimit.get().getAvailableConnection(executeQueryCount); - if (conn == null) { - this.sendError(connection, THROTTLED); - return; // rate limited - } - - // Service - Flux.range(1, queryCount) - .flatMap( - index -> conn.connection.createStatement("SELECT ID, RANDOMNUMBER FROM WORLD WHERE ID = $1") - .bind(0, ThreadLocalRandom.current().nextInt(1, 10001)).execute()) - .flatMap(result -> Flux.from(result.map((row, metadata) -> { - Integer id = row.get(0, Integer.class); - Integer number = row.get(1, Integer.class); - return new World(id, number); - }))).collectList().flatMap(worlds -> { - Collections.sort(worlds, (a, b) -> a.id - b.id); - Batch batch = conn.connection.createBatch(); - for (World world : worlds) { - world.randomNumber = ThreadLocalRandom.current().nextInt(1, 10001); - batch.add("UPDATE WORLD SET RANDOMNUMBER = " + world.randomNumber + " WHERE ID = " - + world.id); - } - return Mono.from(batch.execute()).map((result) -> worlds); - }).publishOn(conn.writeScheduler).subscribe(worlds -> { - try { - response.setContentType(APPLICATION_JSON, null); - this.objectMapper.writeValue(response.getEntityWriter(), worlds); - this.send(connection); - } catch (CancelledKeyException | ClosedChannelException ex) { - // Ignore as disconnecting client - } catch (IOException ex) { - ex.printStackTrace(); - } - }, error -> { - this.sendError(connection, error); - }, () -> { - conn.processed(executeQueryCount); - }); - } - - private void sendError(ProcessAwareServerHttpConnectionManagedObject connection, - Throwable failure) { - try { - - // Setup to send response - HttpResponse response = connection.getResponse(); - response.reset(); - - // Determine type of error - if (failure instanceof R2dbcTransientResourceException) { - - // Indicate overloaded - response.setStatus(HttpStatus.SERVICE_UNAVAILABLE); - - } else { - // Provide details of failure - response.setStatus(HttpStatus.INTERNAL_SERVER_ERROR); - response.setContentType(TEXT_PLAIN, null); - failure.printStackTrace(new PrintWriter(response.getEntityWriter())); - } - - // Send error response - this.send(connection); - - } catch (CancelledKeyException | ClosedChannelException ex) { - // Ignore as disconnecting client - } catch (IOException ex) { - ex.printStackTrace(); - } - } - - private static int getQueryCount(String queries) { - try { - int count = Integer.parseInt(queries); - return (count < 1) ? 1 : (count > 500) ? 
500 : count; - } catch (NumberFormatException ex) { - return 1; - } - } - } - - private static class RateLimit { - - private final RateLimitedConnection[] rateLimitedConnections; - - private final Executor socketExecutor; - - private RateLimit(RequestHandler requestHandler, Connection[] connections) { - - // Create the write scheduler - this.socketExecutor = (runnable) -> requestHandler.execute(() -> { - runnable.run(); - }); - Scheduler writeScheduler = Schedulers.fromExecutor(this.socketExecutor); - - // Create the rate limited connections - this.rateLimitedConnections = new RateLimitedConnection[connections.length]; - for (int i = 0; i < this.rateLimitedConnections.length; i++) { - this.rateLimitedConnections[i] = new RateLimitedConnection(connections[i], writeScheduler); - } - } - - private RateLimitedConnection getAvailableConnection(int queryCount) { - - // Determine available connection for limit - for (int i = 0; i < this.rateLimitedConnections.length; i++) { - RateLimitedConnection connection = this.rateLimitedConnections[i]; - - // Determine if query count reached - int newCount = connection.activeQueries + queryCount; - if (newCount <= QUERY_BUFFER_SIZE) { - // Connection available for load - connection.activeQueries = newCount; - return connection; - } - } - - // As here, no available connection - return null; - } - } - - private static class RateLimitedConnection { - - private final Scheduler writeScheduler; - - private final Connection connection; - - private int activeQueries; - - private RateLimitedConnection(Connection connection, Scheduler writeScheduler) { - this.connection = connection; - this.writeScheduler = writeScheduler; - } - - private void processed(int queryCount) { - - // Update the active queries - this.activeQueries -= queryCount; - } - } - - @Data - public static class Message { - private final String message; - } - - @Data - @AllArgsConstructor - public static class World { - - private final int id; - - private int randomNumber; - } - - @Data - public static class Fortune { - - private final int id; - - private final String message; - } -} \ No newline at end of file diff --git a/frameworks/Java/officefloor/src/woof_benchmark_spring/pom.xml b/frameworks/Java/officefloor/src/woof_benchmark_spring/pom.xml index 71121c55e8e..19052724d81 100644 --- a/frameworks/Java/officefloor/src/woof_benchmark_spring/pom.xml +++ b/frameworks/Java/officefloor/src/woof_benchmark_spring/pom.xml @@ -39,7 +39,7 @@ org.springframework.boot spring-boot-maven-plugin - 2.4.2 + 2.5.0 net.officefloor.OfficeFloorMain exec diff --git a/frameworks/Java/officefloor/src/woof_benchmark_sqlclient/.gitignore b/frameworks/Java/officefloor/src/woof_benchmark_sqlclient/.gitignore new file mode 100644 index 00000000000..89540d82355 --- /dev/null +++ b/frameworks/Java/officefloor/src/woof_benchmark_sqlclient/.gitignore @@ -0,0 +1 @@ +/dependency-reduced-pom.xml diff --git a/frameworks/Java/officefloor/src/woof_benchmark_sqlclient/pom.xml b/frameworks/Java/officefloor/src/woof_benchmark_sqlclient/pom.xml new file mode 100644 index 00000000000..f0e1d6d32e8 --- /dev/null +++ b/frameworks/Java/officefloor/src/woof_benchmark_sqlclient/pom.xml @@ -0,0 +1,49 @@ + + + 4.0.0 + + net.officefloor.benchmarks + benchmarks + 1.0.0 + + woof_benchmark_sqlclient + + + net.officefloor.benchmarks + woof_benchmark_woof + + + net.officefloor.vertx + officevertx + + + io.vertx + vertx-pg-client + + + + + + org.apache.maven.plugins + maven-shade-plugin + + + package + + shade + + + + + 
net.officefloor.benchmark.SqlClientOfficeFloorMain + + + + + + + + + + \ No newline at end of file diff --git a/frameworks/Java/officefloor/src/woof_benchmark_sqlclient/src/main/java/net/officefloor/benchmark/SqlClientOfficeFloorMain.java b/frameworks/Java/officefloor/src/woof_benchmark_sqlclient/src/main/java/net/officefloor/benchmark/SqlClientOfficeFloorMain.java new file mode 100644 index 00000000000..21305d6ac6d --- /dev/null +++ b/frameworks/Java/officefloor/src/woof_benchmark_sqlclient/src/main/java/net/officefloor/benchmark/SqlClientOfficeFloorMain.java @@ -0,0 +1,195 @@ +package net.officefloor.benchmark; + +import io.vertx.core.Vertx; +import io.vertx.pgclient.PgConnectOptions; +import io.vertx.pgclient.PgConnection; +import io.vertx.sqlclient.Row; +import io.vertx.sqlclient.RowIterator; +import io.vertx.sqlclient.SqlConnection; +import io.vertx.sqlclient.Tuple; +import net.officefloor.server.RequestHandler; +import net.officefloor.server.http.HttpResponse; +import net.officefloor.server.http.ServerHttpConnection; +import net.officefloor.server.http.parse.HttpRequestParser; +import net.officefloor.vertx.OfficeFloorVertx; + +import java.net.Socket; +import java.util.ArrayList; +import java.util.List; +import java.util.Queue; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.concurrent.ThreadLocalRandom; + +/** + * R2DBC server. + * + * @author Daniel Sagenschneider + */ +public class SqlClientOfficeFloorMain implements DatabaseOperations { + + /** + * Run application. + */ + public static void main(String[] args) throws Exception { + RawWoof.run(args, (socketCount, server, port, database, username, + password) -> new SqlClientOfficeFloorMain(socketCount, server, port, database, username, password)); + } + + /** + * {@link ThreadLocal} {@link PgConnection} instances. + */ + private final ThreadLocal threadLocalConnection; + + /** + * Instantiate. + * + * @param socketCount Number of server {@link Socket} instances. + * @param server Name of database server. + * @param port Port of database. + * @param database Name of database within server. + * @param username Username. + * @param password Password. 
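+ * <p>
+ * These values are not read from configuration here; {@link RawWoof#run} resolves the HTTP port,
+ * database host and credentials and supplies them through the {@link DatabaseOperationsFactory}
+ * callback. A sketch, equivalent to the lambda in {@code main} and shown only for illustration:
+ * <pre>{@code
+ * RawWoof.run(args, SqlClientOfficeFloorMain::new);
+ * }</pre>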
+ */ + public SqlClientOfficeFloorMain(int socketCount, String server, int port, String database, String username, + String password) { + + // Obtain the vertx + Vertx vertx = OfficeFloorVertx.getVertx(); + + // Create connection + PgConnectOptions connectOptions = new PgConnectOptions().setHost(server).setPort(port).setDatabase(database) + .setUser(username).setPassword(password); + + // Create thread local connection + this.threadLocalConnection = new ThreadLocal() { + @Override + protected PgConnection initialValue() { + try { + return OfficeFloorVertx.block(PgConnection.connect(vertx, connectOptions)); + } catch (Exception ex) { + throw new IllegalStateException("Failed to setup connection", ex); + } + } + }; + } + + /* + * ===================== DatabaseOperations ====================== + */ + + @Override + public void threadSetup(RequestHandler requestHandler) { + // Nothing thread specific to set up + } + + @Override + public void db(HttpResponse response, ServerHttpConnection connection, DatabaseOperationsContext context) { + this.threadLocalConnection.get().preparedQuery("SELECT ID, RANDOMNUMBER FROM WORLD WHERE ID=$1") + .execute(Tuple.of(ThreadLocalRandom.current().nextInt(1, 10001)), + result -> { + if (result.failed()) { + context.sendError(connection, result.cause()); + } else { + RowIterator rows = result.result().iterator(); + if (!rows.hasNext()) { + context.sendError(connection, 404); + } else { + Row row = rows.next(); + World world = new World(row.getInteger(0), row.getInteger(1)); + context.dbSend(response, connection, world); + } + } + }); + } + + @Override + public void queries(int queryCount, HttpResponse response, ServerHttpConnection connection, + DatabaseOperationsContext context) { + Queue worlds = new ConcurrentLinkedDeque<>(); + SqlConnection sqlConnection = this.threadLocalConnection.get(); + for (int i = 0; i < queryCount; i++) { + sqlConnection.preparedQuery("SELECT ID, RANDOMNUMBER FROM WORLD WHERE ID=$1") + .execute(Tuple.of(ThreadLocalRandom.current().nextInt(1, 10001)), + result -> { + if (result.failed()) { + context.sendError(connection, result.cause()); + } else { + RowIterator rows = result.result().iterator(); + if (!rows.hasNext()) { + context.sendError(connection, 404); + } else { + Row row = rows.next(); + World world = new World(row.getInteger(0), row.getInteger(1)); + worlds.add(world); + + if (worlds.size() == queryCount) { + context.queriesSend(response, connection, new ArrayList<>(worlds)); + } + } + } + }); + } + } + + @Override + public void fortunes(HttpResponse response, ServerHttpConnection connection, DatabaseOperationsContext context) { + this.threadLocalConnection.get().preparedQuery("SELECT ID, MESSAGE FROM FORTUNE") + .execute(result -> { + if (result.failed()) { + context.sendError(connection, result.cause()); + } else { + List fortunes = new ArrayList<>(16); + RowIterator rows = result.result().iterator(); + while (rows.hasNext()) { + Row row = rows.next(); + fortunes.add(new Fortune(row.getInteger(0), row.getString(1))); + } + context.fortunesSend(response, connection, fortunes); + } + }); + } + + @Override + public void update(int queryCount, HttpResponse response, ServerHttpConnection connection, + DatabaseOperationsContext context) { + Queue worlds = new ConcurrentLinkedDeque<>(); + SqlConnection sqlConnection = this.threadLocalConnection.get(); + for (int i = 0; i < queryCount; i++) { + sqlConnection.preparedQuery("SELECT ID, RANDOMNUMBER FROM WORLD WHERE ID=$1") + .execute(Tuple.of(ThreadLocalRandom.current().nextInt(1, 
10001)), + result -> { + if (result.failed()) { + context.sendError(connection, result.cause()); + } else { + RowIterator rows = result.result().iterator(); + if (!rows.hasNext()) { + context.sendError(connection, 404); + } else { + Row row = rows.next(); + World world = new World(row.getInteger(0), ThreadLocalRandom.current().nextInt(1, 10001)); + worlds.add(world); + + if (worlds.size() == queryCount) { + + // All worlds obtained, so run update + List batch = new ArrayList<>(queryCount); + for (World update : worlds) { + batch.add(Tuple.of(update.randomNumber, update.id)); + } + sqlConnection.preparedQuery("UPDATE world SET randomnumber=$1 WHERE id=$2").executeBatch(batch, ar -> { + if (result.failed()) { + context.sendError(connection, result.cause()); + } else { + + // Updated, so send response + context.queriesSend(response, connection, new ArrayList<>(worlds)); + } + }); + } + } + } + }); + } + } + +} \ No newline at end of file diff --git a/frameworks/Java/officefloor/src/woof_benchmark_thread_affinity/.gitignore b/frameworks/Java/officefloor/src/woof_benchmark_thread_affinity/.gitignore new file mode 100644 index 00000000000..89540d82355 --- /dev/null +++ b/frameworks/Java/officefloor/src/woof_benchmark_thread_affinity/.gitignore @@ -0,0 +1 @@ +/dependency-reduced-pom.xml diff --git a/frameworks/Java/officefloor/src/woof_benchmark_undertow/.gitignore b/frameworks/Java/officefloor/src/woof_benchmark_undertow/.gitignore new file mode 100644 index 00000000000..89540d82355 --- /dev/null +++ b/frameworks/Java/officefloor/src/woof_benchmark_undertow/.gitignore @@ -0,0 +1 @@ +/dependency-reduced-pom.xml diff --git a/frameworks/Java/officefloor/src/woof_benchmark_vertx/.gitignore b/frameworks/Java/officefloor/src/woof_benchmark_vertx/.gitignore new file mode 100644 index 00000000000..89540d82355 --- /dev/null +++ b/frameworks/Java/officefloor/src/woof_benchmark_vertx/.gitignore @@ -0,0 +1 @@ +/dependency-reduced-pom.xml diff --git a/frameworks/Java/officefloor/src/woof_benchmark_vertx/pom.xml b/frameworks/Java/officefloor/src/woof_benchmark_vertx/pom.xml new file mode 100644 index 00000000000..8e0d5ac341d --- /dev/null +++ b/frameworks/Java/officefloor/src/woof_benchmark_vertx/pom.xml @@ -0,0 +1,45 @@ + + + 4.0.0 + + net.officefloor.benchmarks + benchmarks + 1.0.0 + + woof_benchmark_vertx + + + net.officefloor.benchmarks + woof_benchmark + + + net.officefloor.server + officeserver_vertx + + + + + + org.apache.maven.plugins + maven-shade-plugin + + + package + + shade + + + + + net.officefloor.OfficeFloorMain + + + + + + + + + + \ No newline at end of file diff --git a/frameworks/Java/officefloor/src/woof_benchmark_woof/.gitignore b/frameworks/Java/officefloor/src/woof_benchmark_woof/.gitignore new file mode 100644 index 00000000000..89540d82355 --- /dev/null +++ b/frameworks/Java/officefloor/src/woof_benchmark_woof/.gitignore @@ -0,0 +1 @@ +/dependency-reduced-pom.xml diff --git a/frameworks/Java/officefloor/src/woof_benchmark_woof/pom.xml b/frameworks/Java/officefloor/src/woof_benchmark_woof/pom.xml new file mode 100644 index 00000000000..a79ad25450c --- /dev/null +++ b/frameworks/Java/officefloor/src/woof_benchmark_woof/pom.xml @@ -0,0 +1,35 @@ + + + 4.0.0 + + net.officefloor.benchmarks + benchmarks + 1.0.0 + + woof_benchmark_woof + + + net.officefloor.web + woof + + + com.fasterxml.jackson.core + jackson-databind + + + com.fasterxml.jackson.module + jackson-module-afterburner + + + net.officefloor.web + officeweb_executive + + + net.officefloor.web + officeweb + + + + + \ No newline at 
end of file diff --git a/frameworks/Java/officefloor/src/woof_benchmark_woof/src/main/java/net/officefloor/benchmark/DatabaseOperations.java b/frameworks/Java/officefloor/src/woof_benchmark_woof/src/main/java/net/officefloor/benchmark/DatabaseOperations.java new file mode 100644 index 00000000000..bc0176f2709 --- /dev/null +++ b/frameworks/Java/officefloor/src/woof_benchmark_woof/src/main/java/net/officefloor/benchmark/DatabaseOperations.java @@ -0,0 +1,63 @@ +package net.officefloor.benchmark; + +import java.net.Socket; + +import net.officefloor.server.RequestHandler; +import net.officefloor.server.http.HttpResponse; +import net.officefloor.server.http.ServerHttpConnection; +import net.officefloor.server.http.parse.HttpRequestParser; + +/** + * @author Daniel Sagenschneider + */ +public interface DatabaseOperations { + + /** + * Invoked on the {@link Socket} {@link Thread} to initiate {@link ThreadLocal} + * setup. + * + * @param requestHandler {@link RequestHandler}. + */ + void threadSetup(RequestHandler requestHandler); + + /** + * Undertakes the db. + * + * @param response {@link HttpResponse}. + * @param connection {@link ServerHttpConnection}. + * @param context {@link DatabaseOperationsContext}. + */ + void db(HttpResponse response, ServerHttpConnection connection, DatabaseOperationsContext context); + + /** + * Undertakes the queries. + * + * @param queryCount Query count. + * @param response {@link HttpResponse}. + * @param connection {@link ServerHttpConnection}. + * @param context {@link DatabaseOperationsContext}. + */ + void queries(int queryCount, HttpResponse response, ServerHttpConnection connection, + DatabaseOperationsContext context); + + /** + * Undertakes the fortunes. + * + * @param response {@link HttpResponse}. + * @param connection {@link ServerHttpConnection}. + * @param context {@link DatabaseOperationsContext}. + */ + void fortunes(HttpResponse response, ServerHttpConnection connection, DatabaseOperationsContext context); + + /** + * Undertakes the update. + * + * @param queryCount Query count. + * @param response {@link HttpResponse}. + * @param connection {@link ServerHttpConnection}. + * @param context {@link DatabaseOperationsContext}. + */ + void update(int queryCount, HttpResponse response, ServerHttpConnection connection, + DatabaseOperationsContext context); + +} diff --git a/frameworks/Java/officefloor/src/woof_benchmark_woof/src/main/java/net/officefloor/benchmark/DatabaseOperationsContext.java b/frameworks/Java/officefloor/src/woof_benchmark_woof/src/main/java/net/officefloor/benchmark/DatabaseOperationsContext.java new file mode 100644 index 00000000000..ecb027148a5 --- /dev/null +++ b/frameworks/Java/officefloor/src/woof_benchmark_woof/src/main/java/net/officefloor/benchmark/DatabaseOperationsContext.java @@ -0,0 +1,72 @@ +package net.officefloor.benchmark; + +import java.util.List; + +import net.officefloor.server.http.HttpResponse; +import net.officefloor.server.http.ServerHttpConnection; + +/** + * @author Daniel Sagenschneider + */ +public interface DatabaseOperationsContext { + + /** + * Obtains {@link Exception} for transient overload of resource. + * + * @return {@link Exception} for transient overload of resource. + */ + Exception getTransientResourceException(); + + /** + * Sends db response. + * + * @param response {@link HttpResponse}. + * @param connection {@link ServerHttpConnection}. + * @param world {@link World} to send. 
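+ * <p>
+ * A sketch of the expected call pattern (the {@code loadRandomWorld} helper is hypothetical; the
+ * real implementations in this change use R2DBC or the Vert.x SQL client for the lookup): a
+ * {@link DatabaseOperations#db} implementation loads one random {@link World}, hands it back
+ * through this method, and routes any failure to {@link #sendError(ServerHttpConnection, Throwable)}.
+ * <pre>{@code
+ * public void db(HttpResponse response, ServerHttpConnection connection, DatabaseOperationsContext context) {
+ *     // loadRandomWorld is a hypothetical stand-in for the driver-specific query
+ *     loadRandomWorld(
+ *         world -> context.dbSend(response, connection, world),
+ *         failure -> context.sendError(connection, failure));
+ * }
+ * }</pre>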
+ */ + void dbSend(HttpResponse response, ServerHttpConnection connection, World world); + + /** + * Sends queries response. + * + * @param response {@link HttpResponse}. + * @param connection {@link ServerHttpConnection}. + * @param worlds {@link World} instances to send. + */ + void queriesSend(HttpResponse response, ServerHttpConnection connection, List worlds); + + /** + * Sends fortunes response. + * + * @param response {@link HttpResponse}. + * @param connection {@link ServerHttpConnection}. + * @param fortune {@link Fortune} instances to send. + */ + void fortunesSend(HttpResponse response, ServerHttpConnection connection, List fortunes); + + /** + * Sends update response. + * + * @param response {@link HttpResponse}. + * @param connection {@link ServerHttpConnection}. + * @param worlds {@link World} instances to send. + */ + void updateSend(HttpResponse response, ServerHttpConnection connection, List worlds); + + /** + * Sends error. + * + * @param connection {@link ServerHttpConnection}. + * @param failure Cause. + */ + void sendError(ServerHttpConnection connection, Throwable failure); + + /** + * Sends error. + * + * @param connection {@link ServerHttpConnection}. + * @param satus Status. + */ + void sendError(ServerHttpConnection connection, int status); + +} diff --git a/frameworks/Java/officefloor/src/woof_benchmark_woof/src/main/java/net/officefloor/benchmark/DatabaseOperationsFactory.java b/frameworks/Java/officefloor/src/woof_benchmark_woof/src/main/java/net/officefloor/benchmark/DatabaseOperationsFactory.java new file mode 100644 index 00000000000..5695948bc07 --- /dev/null +++ b/frameworks/Java/officefloor/src/woof_benchmark_woof/src/main/java/net/officefloor/benchmark/DatabaseOperationsFactory.java @@ -0,0 +1,26 @@ +package net.officefloor.benchmark; + +import java.net.Socket; + +/** + * Factory for the {@link DatabaseOperations}. + * + * @author Daniel Sagenschneider + */ +public interface DatabaseOperationsFactory { + + /** + * Creates the {@link DatabaseOperations}. + * + * @param socketCount Number of server {@link Socket} instances. + * @param server Name of database server. + * @param port Port of database. + * @param database Name of database within server. + * @param username Username. + * @param password Password. + * @return {@link DatabaseOperations}. + */ + DatabaseOperations createDatabaseOperations(int socketCount, String server, int port, String database, + String username, String password); + +} \ No newline at end of file diff --git a/frameworks/Java/officefloor/src/woof_benchmark_woof/src/main/java/net/officefloor/benchmark/Fortune.java b/frameworks/Java/officefloor/src/woof_benchmark_woof/src/main/java/net/officefloor/benchmark/Fortune.java new file mode 100644 index 00000000000..1278124c94f --- /dev/null +++ b/frameworks/Java/officefloor/src/woof_benchmark_woof/src/main/java/net/officefloor/benchmark/Fortune.java @@ -0,0 +1,18 @@ +package net.officefloor.benchmark; + +/** + * Fortune entry. 
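+ * <p>
+ * Rendered by the fortunes handling in {@link RawWoof}, which appends an extra
+ * {@code new Fortune(0, "Additional fortune added at request time.")} and sorts the list by
+ * {@code message} before writing the HTML table.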
+ * + * @author Daniel Sagenschneider + */ +public class Fortune { + + public final int id; + + public final String message; + + public Fortune(int id, String message) { + this.id = id; + this.message = message; + } +} \ No newline at end of file diff --git a/frameworks/Java/officefloor/src/woof_benchmark_woof/src/main/java/net/officefloor/benchmark/Message.java b/frameworks/Java/officefloor/src/woof_benchmark_woof/src/main/java/net/officefloor/benchmark/Message.java new file mode 100644 index 00000000000..d004ec45a7f --- /dev/null +++ b/frameworks/Java/officefloor/src/woof_benchmark_woof/src/main/java/net/officefloor/benchmark/Message.java @@ -0,0 +1,15 @@ +package net.officefloor.benchmark; + +/** + * Message response. + * + * @author Daniel Sagenschneider + */ +public class Message { + + public final String message; + + public Message(String message) { + this.message = message; + } +} \ No newline at end of file diff --git a/frameworks/Java/officefloor/src/woof_benchmark_woof/src/main/java/net/officefloor/benchmark/RawWoof.java b/frameworks/Java/officefloor/src/woof_benchmark_woof/src/main/java/net/officefloor/benchmark/RawWoof.java new file mode 100644 index 00000000000..9bcd66cdbbe --- /dev/null +++ b/frameworks/Java/officefloor/src/woof_benchmark_woof/src/main/java/net/officefloor/benchmark/RawWoof.java @@ -0,0 +1,481 @@ +/* + * OfficeFloor - http://www.officefloor.net + * Copyright (C) 2005-2018 Daniel Sagenschneider + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ +package net.officefloor.benchmark; + +import java.io.IOException; +import java.io.PrintWriter; +import java.nio.ByteBuffer; +import java.nio.channels.CancelledKeyException; +import java.nio.channels.ClosedChannelException; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.TimeUnit; +import java.util.logging.Logger; + +import org.apache.commons.text.StringEscapeUtils; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.module.afterburner.AfterburnerModule; + +import net.officefloor.frame.api.manage.OfficeFloor; +import net.officefloor.frame.api.manage.ProcessManager; +import net.officefloor.frame.api.managedobject.ManagedObjectContext; +import net.officefloor.frame.api.managedobject.ProcessSafeOperation; +import net.officefloor.frame.api.managedobject.pool.ThreadCompletionListener; +import net.officefloor.server.RequestHandler; +import net.officefloor.server.SocketManager; +import net.officefloor.server.SocketServicer; +import net.officefloor.server.http.AbstractHttpServicerFactory; +import net.officefloor.server.http.HttpHeaderName; +import net.officefloor.server.http.HttpHeaderValue; +import net.officefloor.server.http.HttpRequest; +import net.officefloor.server.http.HttpResponse; +import net.officefloor.server.http.HttpResponseHeaders; +import net.officefloor.server.http.HttpServerLocation; +import net.officefloor.server.http.HttpServerSocketManagedObjectSource; +import net.officefloor.server.http.HttpStatus; +import net.officefloor.server.http.ServerHttpConnection; +import net.officefloor.server.http.impl.HttpServerLocationImpl; +import net.officefloor.server.http.impl.ProcessAwareServerHttpConnectionManagedObject; +import net.officefloor.server.http.parse.HttpRequestParser; +import net.officefloor.server.http.parse.HttpRequestParser.HttpRequestParserMetaData; +import net.officefloor.server.stream.ServerWriter; +import net.officefloor.server.stream.impl.ThreadLocalStreamBufferPool; + +/** + *

<p> + * {@link SocketManager} raw performance. + * <p>

+ * Allows determining the overhead of the {@link OfficeFloor} framework. + */ +public abstract class RawWoof { + + /** + * {@link SocketManager}. + */ + public static SocketManager socketManager = null; + + /** + * {@link Logger}. + */ + private static Logger logger = Logger.getLogger(RawWoof.class.getName()); + + /** + * Run application. + * + * @param args Command line arguments. + * @param operationsFactory {@link DatabaseOperationsFactory}. + */ + public static void run(String[] args, DatabaseOperationsFactory operationsFactory) throws Exception { + + // Obtain the port from properties + int port = args.length > 0 ? Integer.parseInt(args[0]) : 8080; + + // Ensure previous socket manager shutdown (typically from tests) + if (socketManager != null) { + socketManager.shutdown(); + } + + // Indicate details + String server = System.getProperty("OFFICE.net_officefloor_jdbc_DataSourceManagedObjectSource.server", + "tfb-database"); + System.out.println("Starting server on port " + port + " talking to database " + server); + + // Create the server location + HttpServerLocation serverLocation = new HttpServerLocationImpl("localhost", port, -1); + + // Create a thread factory per logical CPU + ThreadCompletionListener[] threadCompletionListenerCapture = new ThreadCompletionListener[]{null}; + ThreadFactory[] executionStrategy = RawWoofThreadAffinity + .createThreadFactories(() -> (ThreadLocalStreamBufferPool) threadCompletionListenerCapture[0]); + System.out.println("Using " + executionStrategy.length + " executors"); + + // Create the socket manager + socketManager = HttpServerSocketManagedObjectSource.createSocketManager(executionStrategy, + (threadCompletionListener) -> threadCompletionListenerCapture[0] = threadCompletionListener); + + // Create the database operations + DatabaseOperations operations = operationsFactory.createDatabaseOperations(executionStrategy.length, server, + 5432, "hello_world", "benchmarkdbuser", "benchmarkdbpass"); + + // Create raw HTTP servicing + RawHttpServicerFactory serviceFactory = new RawHttpServicerFactory(serverLocation, operations); + socketManager.bindServerSocket(serverLocation.getClusterHttpPort(), null, null, serviceFactory, serviceFactory); + + // Setup Date + ScheduledExecutorService dateTimer = Executors.newScheduledThreadPool(1); + dateTimer.scheduleAtFixedRate(serviceFactory.updateDate, 0, 1, TimeUnit.SECONDS); + + // Start servicing + Runnable[] runnables = socketManager.getRunnables(); + for (int i = 0; i < runnables.length; i++) { + executionStrategy[i].newThread(runnables[i]).start(); + } + Thread.sleep(1000); // allow threads to start up + + // Indicate running + System.out.println("OfficeFloor running on port " + serverLocation.getClusterHttpPort()); + } + + /** + * Raw {@link AbstractHttpServicerFactory}. 
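+ * <p>
+ * Summary of the routing implemented in {@code service(...)} below: every response carries the
+ * {@code Server: O} and {@code Date} headers; {@code /plaintext} and {@code /json} are answered
+ * directly, while {@code /db}, {@code /fortunes}, {@code /queries?queries=} and
+ * {@code /update?queries=} are delegated to the configured {@link DatabaseOperations}, and any
+ * other URI is answered with 404. Query counts are clamped by {@code getQueryCount}:
+ * <pre>{@code
+ * getQueryCount("20")  // -> 20
+ * getQueryCount("750") // -> 500 (clamped to the 1..500 range)
+ * getQueryCount("abc") // -> 1   (default on parse failure)
+ * }</pre>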
+ */ + private static class RawHttpServicerFactory extends AbstractHttpServicerFactory + implements DatabaseOperationsContext { + + private static HttpHeaderName NAME_SERVER = new HttpHeaderName("Server"); + + private static HttpHeaderValue VALUE_SERVER = new HttpHeaderValue("O"); + + private static HttpHeaderName NAME_DATE = new HttpHeaderName("Date"); + + private static byte[] HELLO_WORLD = "Hello, World!".getBytes(ServerHttpConnection.DEFAULT_HTTP_ENTITY_CHARSET); + + private static final HttpHeaderValue APPLICATION_JSON = new HttpHeaderValue("application/json"); + + private static final HttpHeaderValue TEXT_PLAIN = new HttpHeaderValue("text/plain"); + + private static final HttpHeaderValue TEXT_HTML = new HttpHeaderValue("text/html;charset=utf-8"); + + private static final String QUERIES_PATH_PREFIX = "/queries?queries="; + + private static final String UPDATE_PATH_PREFIX = "/update?queries="; + + private static final byte[] TEMPLATE_START = "Fortunes" + .getBytes(ServerHttpConnection.DEFAULT_HTTP_ENTITY_CHARSET); + + private static final byte[] FORTUNE_START = "" + .getBytes(ServerHttpConnection.DEFAULT_HTTP_ENTITY_CHARSET); + + private static final byte[] TEMPLATE_END = "
idmessage
" + .getBytes(ServerHttpConnection.DEFAULT_HTTP_ENTITY_CHARSET); + + private static final byte[] FORTUNE_MIDDLE = "" + .getBytes(ServerHttpConnection.DEFAULT_HTTP_ENTITY_CHARSET); + + private static final byte[] FORTUNE_END = "
" + .getBytes(ServerHttpConnection.DEFAULT_HTTP_ENTITY_CHARSET); + + private static final TransientResourceException THROTTLED = new TransientResourceException(); + + private static class TransientResourceException extends Exception { + private static final long serialVersionUID = 1L; + } + + /** + * Date {@link HttpHeaderValue}. + */ + private volatile HttpHeaderValue dateHttpHeader; + + private final Runnable updateDate = () -> { + String now = DateTimeFormatter.RFC_1123_DATE_TIME.format(ZonedDateTime.now(ZoneOffset.UTC)); + RawHttpServicerFactory.this.dateHttpHeader = new HttpHeaderValue(now); + }; + + /** + * {@link ObjectMapper}. + */ + private final ObjectMapper objectMapper = new ObjectMapper(); + + /** + * {@link ManagedObjectContext}. + */ + private static ManagedObjectContext managedObjectContext = new ManagedObjectContext() { + + @Override + public String getBoundName() { + return RawWoof.class.getSimpleName(); + } + + @Override + public Logger getLogger() { + return logger; + } + + @Override + public R run(ProcessSafeOperation operation) throws T { + return operation.run(); + } + }; + + private static int getQueryCount(String queries) { + try { + int count = Integer.parseInt(queries); + return (count < 1) ? 1 : (count > 500) ? 500 : count; + } catch (NumberFormatException ex) { + return 1; + } + } + + /** + * {@link DatabaseOperations}. + */ + private final DatabaseOperations databaseOperations; + + /** + * Instantiate. + * + * @param serverLocation {@link HttpServerLocation}. + * @param operations {@link DatabaseOperations}. + */ + public RawHttpServicerFactory(HttpServerLocation serverLocation, DatabaseOperations operations) { + super(serverLocation, false, new HttpRequestParserMetaData(100, 1000, 1000000), null, null, true); + this.objectMapper.registerModule(new AfterburnerModule()); + this.databaseOperations = operations; + } + + /** + * Sends the {@link HttpResponse}. + * + * @param connection {@link ServerHttpConnection}. + * @throws IOException If fails to send. 
+ */ + protected void send(ServerHttpConnection connection) throws IOException { + try { + @SuppressWarnings("unchecked") + ProcessAwareServerHttpConnectionManagedObject rawConnection = (ProcessAwareServerHttpConnectionManagedObject) connection; + rawConnection.getServiceFlowCallback().run(null); + } catch (IOException ex) { + throw ex; + } catch (Throwable ex) { + throw new IOException(ex); + } + } + + /* + * =============== SocketServicerFactory ================= + */ + + @Override + public SocketServicer createSocketServicer( + RequestHandler requestHandler) { + + // Set up the thread + this.databaseOperations.threadSetup(requestHandler); + + // Continue on to create socket servicer + return super.createSocketServicer(requestHandler); + } + + /* + * ===================== HttpServicer ==================== + */ + + @Override + protected ProcessManager service(ProcessAwareServerHttpConnectionManagedObject connection) + throws IOException { + + // Configure context + connection.setManagedObjectContext(managedObjectContext); + + // Service the connection + HttpRequest request = connection.getRequest(); + HttpResponse response = connection.getResponse(); + + // Provider Server and Date + HttpResponseHeaders headers = response.getHeaders(); + headers.addHeader(NAME_SERVER, VALUE_SERVER); + headers.addHeader(NAME_DATE, this.dateHttpHeader); + + // Determine request + String requestUri = request.getUri(); + switch (requestUri) { + + case "/plaintext": + response.setContentType(TEXT_PLAIN, null); + response.getEntity().write(HELLO_WORLD); + this.send(connection); + break; + + case "/json": + response.setContentType(APPLICATION_JSON, null); + this.objectMapper.writeValue(response.getEntityWriter(), new Message("Hello, World!")); + this.send(connection); + break; + + case "/db": + this.databaseOperations.db(response, connection, this); + break; + + case "/fortunes": + this.databaseOperations.fortunes(response, connection, this); + break; + + default: + // Provide redirect + if (requestUri.startsWith(QUERIES_PATH_PREFIX)) { + // Obtain the number of queries + String queriesCountText = requestUri.substring(QUERIES_PATH_PREFIX.length()); + int queryCount = getQueryCount(queriesCountText); + + // Undertake queries + this.databaseOperations.queries(queryCount, response, connection, this); + + } else if (requestUri.startsWith(UPDATE_PATH_PREFIX)) { + // Obtain the number of queries + String queriesCountText = requestUri.substring(UPDATE_PATH_PREFIX.length()); + int queryCount = getQueryCount(queriesCountText); + + // Undertake update + this.databaseOperations.update(queryCount, response, connection, this); + + } else { + // Unknown request + response.setStatus(HttpStatus.NOT_FOUND); + this.send(connection); + } + break; + } + + // No process management + return null; + } + + /* + * ==================== DatabaseOperationsContext ===================== + */ + + @Override + public Exception getTransientResourceException() { + return THROTTLED; + } + + @Override + public void dbSend(HttpResponse response, ServerHttpConnection connection, World world) { + try { + response.setContentType(APPLICATION_JSON, null); + this.objectMapper.writeValue(response.getEntityWriter(), world); + this.send(connection); + } catch (CancelledKeyException | ClosedChannelException ex) { + // Ignore as disconnecting client + } catch (IOException ex) { + ex.printStackTrace(); + } + } + + @Override + public void queriesSend(HttpResponse response, ServerHttpConnection connection, List worlds) { + try { + 
response.setContentType(APPLICATION_JSON, null); + this.objectMapper.writeValue(response.getEntityWriter(), worlds); + this.send(connection); + } catch (CancelledKeyException | ClosedChannelException ex) { + // Ignore as disconnecting client + } catch (IOException ex) { + ex.printStackTrace(); + } + } + + @Override + public void fortunesSend(HttpResponse response, ServerHttpConnection connection, List fortunes) { + try { + // Additional fortunes + fortunes.add(new Fortune(0, "Additional fortune added at request time.")); + Collections.sort(fortunes, (a, b) -> a.message.compareTo(b.message)); + + // Send response + response.setContentType(TEXT_HTML, null); + ServerWriter writer = response.getEntityWriter(); + writer.write(TEMPLATE_START); + for (Fortune fortune : fortunes) { + writer.write(FORTUNE_START); + int id = fortune.id; + writer.write(Integer.valueOf(id).toString()); + writer.write(FORTUNE_MIDDLE); + StringEscapeUtils.ESCAPE_HTML4.translate(fortune.message, writer); + writer.write(FORTUNE_END); + } + writer.write(TEMPLATE_END); + this.send(connection); + } catch (CancelledKeyException | ClosedChannelException ex) { + // Ignore as disconnecting client + } catch (IOException ex) { + ex.printStackTrace(); + } + } + + @Override + public void updateSend(HttpResponse response, ServerHttpConnection connection, List worlds) { + try { + response.setContentType(APPLICATION_JSON, null); + this.objectMapper.writeValue(response.getEntityWriter(), worlds); + this.send(connection); + } catch (CancelledKeyException | ClosedChannelException ex) { + // Ignore as disconnecting client + } catch (IOException ex) { + ex.printStackTrace(); + } + } + + @Override + public void sendError(ServerHttpConnection connection, Throwable failure) { + try { + + // Setup to send response + HttpResponse response = connection.getResponse(); + response.reset(); + + // Determine type of error + if (failure instanceof TransientResourceException) { + + // Indicate overloaded + response.setStatus(HttpStatus.SERVICE_UNAVAILABLE); + + } else { + // Provide details of failure + response.setStatus(HttpStatus.INTERNAL_SERVER_ERROR); + response.setContentType(TEXT_PLAIN, null); + failure.printStackTrace(new PrintWriter(response.getEntityWriter())); + } + + // Send error response + this.send(connection); + + } catch (CancelledKeyException | ClosedChannelException ex) { + // Ignore as disconnecting client + } catch (IOException ex) { + ex.printStackTrace(); + } + } + + @Override + public void sendError(ServerHttpConnection connection, int status) { + try { + // Setup to send response + HttpResponse response = connection.getResponse(); + response.reset(); + + // Flag error status + response.setStatus(HttpStatus.getHttpStatus(status)); + + // Send error response + this.send(connection); + + } catch (IOException ex) { + ex.printStackTrace(); + } + } + } + +} \ No newline at end of file diff --git a/frameworks/Java/officefloor/src/woof_benchmark_woof/src/main/java/net/officefloor/benchmark/RawWoofThreadAffinity.java b/frameworks/Java/officefloor/src/woof_benchmark_woof/src/main/java/net/officefloor/benchmark/RawWoofThreadAffinity.java new file mode 100644 index 00000000000..66ecfcc405c --- /dev/null +++ b/frameworks/Java/officefloor/src/woof_benchmark_woof/src/main/java/net/officefloor/benchmark/RawWoofThreadAffinity.java @@ -0,0 +1,75 @@ +package net.officefloor.benchmark; + +import java.util.LinkedList; +import java.util.List; +import java.util.concurrent.ThreadFactory; +import java.util.function.Supplier; + +import 
net.officefloor.server.stream.impl.ThreadLocalStreamBufferPool; +import net.officefloor.web.executive.CpuCore; +import net.openhft.affinity.Affinity; + +/** + * Encapsulating the {@link Thread} affinity. + * + * @author Daniel Sagenschneider + */ +public class RawWoofThreadAffinity { + + private static final boolean IS_BENCHMARK_DEBUG = Boolean.getBoolean("officefloor.benchmark.debug"); + + /** + * Obtains the {@link ThreadFactory} with {@link Thread} affinity. + * + * @param bufferPoolSupplier Obtains the {@link ThreadLocalStreamBufferPool}. + * @return {@link ThreadFactory} with {@link Thread} affinity. + */ + public static ThreadFactory[] createThreadFactories(Supplier bufferPoolSupplier) { + List threadFactories = new LinkedList<>(); + if (IS_BENCHMARK_DEBUG) { + // Simple threading for debug + System.out.println("\n\nWARNING: using debug mode so performance will suffer\n\n"); + for (int i = 0; i < Runtime.getRuntime().availableProcessors(); i++) { + threadFactories.add((runnable) -> new Thread(() -> { + ThreadLocalStreamBufferPool bufferPool = bufferPoolSupplier.get(); + try { + bufferPool.activeThreadLocalPooling(); + runnable.run(); + } finally { + bufferPool.threadComplete(); + } + })); + } + + } else { + // Provide socket per thread with thread affinity + for (CpuCore cpuCore : CpuCore.getCores()) { + for (CpuCore.LogicalCpu logicalCpu : cpuCore.getCpus()) { + + // Create thread factory for logical CPU + ThreadFactory boundThreadFactory = (runnable) -> new Thread(() -> { + ThreadLocalStreamBufferPool bufferPool = bufferPoolSupplier.get(); + try { + // Bind thread to logical CPU + Affinity.setAffinity(logicalCpu.getCpuAffinity()); + + // Set up for thread local buffer pooling + bufferPool.activeThreadLocalPooling(); + + // Run logic for thread + runnable.run(); + } finally { + bufferPool.threadComplete(); + } + }); + + // Add the thread factory + threadFactories.add(boundThreadFactory); + } + } + } + + return threadFactories.toArray(new ThreadFactory[0]); + } + +} \ No newline at end of file diff --git a/frameworks/Java/officefloor/src/woof_benchmark_woof/src/main/java/net/officefloor/benchmark/World.java b/frameworks/Java/officefloor/src/woof_benchmark_woof/src/main/java/net/officefloor/benchmark/World.java new file mode 100644 index 00000000000..17d5ba84eb5 --- /dev/null +++ b/frameworks/Java/officefloor/src/woof_benchmark_woof/src/main/java/net/officefloor/benchmark/World.java @@ -0,0 +1,18 @@ +package net.officefloor.benchmark; + +/** + * World response. 
+ * + * @author Daniel Sagenschneider + */ +public class World { + + public final int id; + + public int randomNumber; + + public World(int id, int randomNumber) { + this.id = id; + this.randomNumber = randomNumber; + } +} \ No newline at end of file diff --git a/frameworks/Java/redkale/benchmark_config.json b/frameworks/Java/redkale/benchmark_config.json index 3268f50ddbf..36add1c5bce 100644 --- a/frameworks/Java/redkale/benchmark_config.json +++ b/frameworks/Java/redkale/benchmark_config.json @@ -21,7 +21,9 @@ "notes": "", "versus": "Redkale" }, - "cache": { + "j17": { + "plaintext_url": "/plaintext", + "json_url": "/json", "cached_query_url": "/cached-worlds?q=", "port": 8080, "approach": "Realistic", @@ -35,48 +37,12 @@ "webserver": "Redkale", "os": "Linux", "database_os": "Linux", - "display_name": "redkale", - "notes": "", - "versus": "Redkale" - }, - "db": { - "db_url": "/db", - "port": 8080, - "approach": "Realistic", - "classification": "Fullstack", - "database": "Postgres", - "framework": "Redkale", - "language": "Java", - "flavor": "None", - "orm": "Raw", - "platform": "Redkale", - "webserver": "Redkale", - "os": "Linux", - "database_os": "Linux", - "display_name": "redkale", + "display_name": "redkale-j17", "notes": "", "versus": "Redkale" }, - "query": { - "query_url": "/queries?q=", - "port": 8080, - "approach": "Realistic", - "classification": "Fullstack", - "database": "Postgres", - "framework": "Redkale", - "language": "Java", - "flavor": "None", - "orm": "Raw", - "platform": "Redkale", - "webserver": "Redkale", - "os": "Linux", - "database_os": "Linux", - "display_name": "redkale", - "notes": "", - "versus": "Redkale" - }, - "fortune": { - "fortune_url": "/fortunes", + "cache": { + "cached_query_url": "/cached-worlds?q=", "port": 8080, "approach": "Realistic", "classification": "Fullstack", @@ -93,8 +59,11 @@ "notes": "", "versus": "Redkale" }, - "update": { - "update_url": "/updates?q=", + "postgres": { + "db_url": "/db", + "query_url2": "/queries?q=", + "fortune_url2": "/fortunes", + "update_url2": "/updates?q=", "port": 8080, "approach": "Realistic", "classification": "Fullstack", @@ -107,7 +76,7 @@ "webserver": "Redkale", "os": "Linux", "database_os": "Linux", - "display_name": "redkale", + "display_name": "redkale-postgres", "notes": "", "versus": "Redkale" } diff --git a/frameworks/Java/redkale/conf/application.xml b/frameworks/Java/redkale/conf/application.xml index 7e709c30812..8279474b694 100644 --- a/frameworks/Java/redkale/conf/application.xml +++ b/frameworks/Java/redkale/conf/application.xml @@ -5,8 +5,9 @@ - + + diff --git a/frameworks/Java/redkale/config.toml b/frameworks/Java/redkale/config.toml index 1983c168f36..915ed3e3cf2 100644 --- a/frameworks/Java/redkale/config.toml +++ b/frameworks/Java/redkale/config.toml @@ -14,7 +14,9 @@ platform = "Redkale" webserver = "Redkale" versus = "Redkale" -[cache] +[j17] +urls.plaintext = "/plaintext" +urls.json = "/json" urls.cached_query = "/cached-worlds?q=" approach = "Realistic" classification = "Fullstack" @@ -26,8 +28,8 @@ platform = "Redkale" webserver = "Redkale" versus = "Redkale" -[db] -urls.db = "/db" +[cache] +urls.cached_query = "/cached-worlds?q=" approach = "Realistic" classification = "Fullstack" database = "Postgres" @@ -38,31 +40,10 @@ platform = "Redkale" webserver = "Redkale" versus = "Redkale" -[fortune] +[postgres] +urls.db = "/db" urls.fortune = "/fortunes" -approach = "Realistic" -classification = "Fullstack" -database = "Postgres" -database_os = "Linux" -os = "Linux" -orm = "Raw" -platform = 
"Redkale" -webserver = "Redkale" -versus = "Redkale" - -[query] urls.query = "/queries?q=" -approach = "Realistic" -classification = "Fullstack" -database = "Postgres" -database_os = "Linux" -os = "Linux" -orm = "Raw" -platform = "Redkale" -webserver = "Redkale" -versus = "Redkale" - -[update] urls.update = "/updates?q=" approach = "Realistic" classification = "Fullstack" @@ -73,4 +54,3 @@ orm = "Raw" platform = "Redkale" webserver = "Redkale" versus = "Redkale" - diff --git a/frameworks/Java/redkale/pom.xml b/frameworks/Java/redkale/pom.xml index 6abdb8ca0c9..6545ddfddd9 100644 --- a/frameworks/Java/redkale/pom.xml +++ b/frameworks/Java/redkale/pom.xml @@ -10,21 +10,21 @@ org.redkale.boot.Application 1.0.0 UTF-8 - 11 - 11 + 1.8 + 1.8 org.redkale redkale - 2.3.0-SNAPSHOT + 2.4.0-SNAPSHOT org.redkalex redkale-plugins - 2.3.0-SNAPSHOT + 2.4.0-SNAPSHOT diff --git a/frameworks/Java/redkale/redkale-cache.dockerfile b/frameworks/Java/redkale/redkale-cache.dockerfile index 173f5708d3b..75a65052ab1 100644 --- a/frameworks/Java/redkale/redkale-cache.dockerfile +++ b/frameworks/Java/redkale/redkale-cache.dockerfile @@ -12,4 +12,4 @@ COPY --from=maven /redkale/target/redkale-benchmark-1.0.0.jar redkale-benchmark. EXPOSE 8080 -CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-XX:AutoBoxCacheMax=20000", "-Dbenchmarks.cache=true", "-DAPP_HOME=./", "-jar", "redkale-benchmark.jar"] +CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-XX:AutoBoxCacheMax=80000", "-Dbenchmarks.cache=true", "-DAPP_HOME=./", "-jar", "redkale-benchmark.jar"] diff --git a/frameworks/Java/redkale/redkale-fortune.dockerfile b/frameworks/Java/redkale/redkale-j17.dockerfile similarity index 62% rename from frameworks/Java/redkale/redkale-fortune.dockerfile rename to frameworks/Java/redkale/redkale-j17.dockerfile index 38e8fe0c0f6..7467f65f804 100644 --- a/frameworks/Java/redkale/redkale-fortune.dockerfile +++ b/frameworks/Java/redkale/redkale-j17.dockerfile @@ -1,15 +1,15 @@ -FROM maven:3.6.3-openjdk-16-slim as maven +FROM maven:3.8.1-openjdk-17-slim as maven WORKDIR /redkale COPY src src COPY conf conf COPY pom.xml pom.xml RUN mvn package -q -FROM openjdk:16-jdk-slim +FROM openjdk:17-jdk-slim WORKDIR /redkale COPY conf conf COPY --from=maven /redkale/target/redkale-benchmark-1.0.0.jar redkale-benchmark.jar EXPOSE 8080 -CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-XX:AutoBoxCacheMax=20000", "-Dbenchmarks.db=true", "-DAPP_HOME=./", "-jar", "redkale-benchmark.jar"] +CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-XX:AutoBoxCacheMax=40000", "-Dbenchmarks.cache=true", "-DAPP_HOME=./", "-jar", "redkale-benchmark.jar"] diff --git a/frameworks/Java/redkale/redkale-db.dockerfile b/frameworks/Java/redkale/redkale-postgres.dockerfile similarity index 85% rename from frameworks/Java/redkale/redkale-db.dockerfile rename to frameworks/Java/redkale/redkale-postgres.dockerfile index 38e8fe0c0f6..4032dcdf8ec 100644 --- a/frameworks/Java/redkale/redkale-db.dockerfile +++ b/frameworks/Java/redkale/redkale-postgres.dockerfile @@ -12,4 +12,4 @@ COPY --from=maven /redkale/target/redkale-benchmark-1.0.0.jar redkale-benchmark. 
EXPOSE 8080 -CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-XX:AutoBoxCacheMax=20000", "-Dbenchmarks.db=true", "-DAPP_HOME=./", "-jar", "redkale-benchmark.jar"] +CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-XX:AutoBoxCacheMax=80000", "-Dbenchmarks.db=true", "-DAPP_HOME=./", "-jar", "redkale-benchmark.jar"] diff --git a/frameworks/Java/redkale/redkale-query.dockerfile b/frameworks/Java/redkale/redkale-query.dockerfile deleted file mode 100644 index 38e8fe0c0f6..00000000000 --- a/frameworks/Java/redkale/redkale-query.dockerfile +++ /dev/null @@ -1,15 +0,0 @@ -FROM maven:3.6.3-openjdk-16-slim as maven -WORKDIR /redkale -COPY src src -COPY conf conf -COPY pom.xml pom.xml -RUN mvn package -q - -FROM openjdk:16-jdk-slim -WORKDIR /redkale -COPY conf conf -COPY --from=maven /redkale/target/redkale-benchmark-1.0.0.jar redkale-benchmark.jar - -EXPOSE 8080 - -CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-XX:AutoBoxCacheMax=20000", "-Dbenchmarks.db=true", "-DAPP_HOME=./", "-jar", "redkale-benchmark.jar"] diff --git a/frameworks/Java/redkale/redkale-update.dockerfile b/frameworks/Java/redkale/redkale-update.dockerfile deleted file mode 100644 index 38e8fe0c0f6..00000000000 --- a/frameworks/Java/redkale/redkale-update.dockerfile +++ /dev/null @@ -1,15 +0,0 @@ -FROM maven:3.6.3-openjdk-16-slim as maven -WORKDIR /redkale -COPY src src -COPY conf conf -COPY pom.xml pom.xml -RUN mvn package -q - -FROM openjdk:16-jdk-slim -WORKDIR /redkale -COPY conf conf -COPY --from=maven /redkale/target/redkale-benchmark-1.0.0.jar redkale-benchmark.jar - -EXPOSE 8080 - -CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-XX:AutoBoxCacheMax=20000", "-Dbenchmarks.db=true", "-DAPP_HOME=./", "-jar", "redkale-benchmark.jar"] diff --git a/frameworks/Java/redkale/redkale.dockerfile b/frameworks/Java/redkale/redkale.dockerfile index 2f5508fc106..1c5756ba2bc 100644 --- a/frameworks/Java/redkale/redkale.dockerfile +++ b/frameworks/Java/redkale/redkale.dockerfile @@ -12,4 +12,4 @@ COPY --from=maven /redkale/target/redkale-benchmark-1.0.0.jar redkale-benchmark. 
EXPOSE 8080 -CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-XX:AutoBoxCacheMax=20000", "-DAPP_HOME=./", "-jar", "redkale-benchmark.jar"] +CMD ["java", "-server", "-XX:+UseNUMA", "-XX:+UseParallelGC", "-XX:AutoBoxCacheMax=80000", "-DAPP_HOME=./", "-jar", "redkale-benchmark.jar"] diff --git a/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/CachedWorld.java b/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/CachedWorld.java index 58a6bb5d2af..ff979cbf0c5 100644 --- a/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/CachedWorld.java +++ b/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/CachedWorld.java @@ -7,13 +7,13 @@ import javax.persistence.*; import org.redkale.convert.json.JsonConvert; - +import org.redkale.source.*; /** * * @author zhangjx */ -@Cacheable(direct=true) +//@Cacheable(direct = true) @Table(name = "World") public final class CachedWorld implements Comparable { @@ -53,4 +53,16 @@ public String toString() { return JsonConvert.root().convertTo(this); } + public static class WorldEntityCache { + + private Object[] array; + + public WorldEntityCache(DataSource source) { + this.array = source.queryList(CachedWorld.class).toArray(); + } + + public CachedWorld findAt(int index) { + return (CachedWorld) array[index]; + } + } } diff --git a/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/Fortune.java b/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/Fortune.java index 4bd13405567..cffb0ae8ec1 100644 --- a/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/Fortune.java +++ b/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/Fortune.java @@ -5,6 +5,7 @@ */ package org.redkalex.benchmark; +import java.util.*; import javax.persistence.Id; import org.redkale.convert.json.JsonConvert; @@ -27,6 +28,11 @@ public Fortune(int id, String message) { this.message = message; } + public static List sort(List fortunes) { + Collections.sort(fortunes); + return fortunes; + } + public int getId() { return id; } diff --git a/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/Service.java b/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/Service.java index 74a1c981cc9..4c87af58e1d 100644 --- a/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/Service.java +++ b/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/Service.java @@ -5,13 +5,13 @@ */ package org.redkalex.benchmark; -import java.util.*; -import java.util.concurrent.CompletableFuture; +import java.util.concurrent.*; import javax.annotation.Resource; +import org.redkale.net.ChannelContext; import org.redkale.net.http.*; import org.redkale.service.AbstractService; import org.redkale.source.*; -import org.redkale.util.AnyValue; +import org.redkalex.benchmark.CachedWorld.WorldEntityCache; /** * @@ -20,85 +20,77 @@ @RestService(name = " ", repair = false) public class Service extends AbstractService { - private static final byte[] helloBytes = "Hello, world!".intern().getBytes(); - - private final Random random = new Random(); + private static final byte[] helloBytes = "Hello, world!".getBytes(); @Resource private DataSource source; - private EntityCache cache; - - @Override - public void init(AnyValue conf) { - if (Boolean.getBoolean("benchmarks.cache")) { - this.cache = ((DataSqlSource) source).loadCache(CachedWorld.class).array(); - } - } + private WorldEntityCache cache; @RestMapping(name = "plaintext") public byte[] getHelloBytes() { return helloBytes; } - @RestMapping(name = "json", length 
= 27) + @RestMapping(name = "json") public Message getHelloMessage() { return Message.create("Hello, World!"); } @RestMapping(name = "db") - public CompletableFuture findWorldAsync() { - return source.findAsync(World.class, randomId()); + public CompletableFuture findWorldAsync(ChannelContext context) { + return source.findAsync(World.class, context, 1 + ThreadLocalRandom.current().nextInt(10000)); } @RestMapping(name = "queries") - public CompletableFuture queryWorldAsync(int q) { + public CompletableFuture queryWorldAsync(ChannelContext context, int q) { final int size = Math.min(500, Math.max(1, q)); final World[] worlds = new World[size]; + final ThreadLocalRandom random = ThreadLocalRandom.current(); final CompletableFuture[] futures = new CompletableFuture[size]; for (int i = 0; i < size; i++) { final int index = i; - futures[index] = source.findAsync(World.class, randomId()).thenAccept(v -> worlds[index] = v); + futures[index] = source.findAsync(World.class, context, 1 + random.nextInt(10000)).thenAccept(v -> worlds[index] = v); } return CompletableFuture.allOf(futures).thenApply(v -> worlds); } @RestMapping(name = "updates") - public CompletableFuture updateWorldAsync(int q) { + public CompletableFuture updateWorldAsync(ChannelContext context, int q) { final int size = Math.min(500, Math.max(1, q)); final World[] worlds = new World[size]; + final ThreadLocalRandom random = ThreadLocalRandom.current(); final CompletableFuture[] futures = new CompletableFuture[size]; for (int i = 0; i < size; i++) { final int index = i; - futures[index] = source.findAsync(World.class, randomId()).thenAccept(v -> worlds[index] = v.randomNumber(randomId())); + futures[index] = source.findAsync(World.class, context, 1 + random.nextInt(10000)).thenAccept(v -> worlds[index] = v.randomNumber(1 + random.nextInt(10000))); } - return CompletableFuture.allOf(futures).thenCompose(v -> { - Arrays.sort(worlds); - return source.updateAsync(worlds); - }).thenApply(v -> worlds); + return CompletableFuture.allOf(futures).thenCompose(v -> source.updateAsync(context, World.sort(worlds))).thenApply(v -> worlds); } @RestMapping(name = "fortunes") public CompletableFuture> queryFortunes() { return source.queryListAsync(Fortune.class).thenApply((fortunes) -> { fortunes.add(new Fortune(0, "Additional fortune added at request time.")); - Collections.sort(fortunes); - String html = FortunesTemplate.template(fortunes).render().toString(); + String html = FortunesTemplate.template(Fortune.sort(fortunes)).render().toString(); return new HttpResult("text/html; charset=utf-8", html); }); } @RestMapping(name = "cached-worlds") public CachedWorld[] cachedWorlds(int q) { + if (cache == null) { + synchronized (this) { + if (cache == null) cache = new WorldEntityCache(source); + } + } final int size = Math.min(500, Math.max(1, q)); final CachedWorld[] worlds = new CachedWorld[size]; + final ThreadLocalRandom random = ThreadLocalRandom.current(); for (int i = 0; i < size; i++) { - worlds[i] = cache.findAt(randomId()); + worlds[i] = cache.findAt(random.nextInt(10000)); } return worlds; } - private int randomId() { - return 1 + random.nextInt(10000); - } } diff --git a/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/World.java b/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/World.java index b95cc6c1976..498125541d2 100644 --- a/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/World.java +++ b/frameworks/Java/redkale/src/main/java/org/redkalex/benchmark/World.java @@ -5,6 +5,7 @@ */ package 
org.redkalex.benchmark; +import java.util.*; import javax.persistence.Id; import org.redkale.convert.json.JsonConvert; @@ -24,6 +25,11 @@ public World randomNumber(int randomNumber) { return this; } + public static World[] sort(World[] worlds) { + Arrays.sort(worlds); + return worlds; + } + public int getId() { return id; } diff --git a/frameworks/JavaScript/fastify/fastify-mysql.dockerfile b/frameworks/JavaScript/fastify/fastify-mysql.dockerfile index 70c0b37e076..388d84b8d33 100644 --- a/frameworks/JavaScript/fastify/fastify-mysql.dockerfile +++ b/frameworks/JavaScript/fastify/fastify-mysql.dockerfile @@ -1,4 +1,4 @@ -FROM node:14.16.0-slim +FROM node:14.17.0 COPY ./ ./ diff --git a/frameworks/JavaScript/fastify/fastify-postgres.dockerfile b/frameworks/JavaScript/fastify/fastify-postgres.dockerfile index 5d707176de1..41b0eed24e7 100644 --- a/frameworks/JavaScript/fastify/fastify-postgres.dockerfile +++ b/frameworks/JavaScript/fastify/fastify-postgres.dockerfile @@ -1,4 +1,4 @@ -FROM node:14.16.0-slim +FROM node:14.17.0 COPY ./ ./ diff --git a/frameworks/JavaScript/fastify/fastify.dockerfile b/frameworks/JavaScript/fastify/fastify.dockerfile index 4de5e22475f..cbe5c92cbe6 100644 --- a/frameworks/JavaScript/fastify/fastify.dockerfile +++ b/frameworks/JavaScript/fastify/fastify.dockerfile @@ -1,4 +1,4 @@ -FROM node:14.16.0-slim +FROM node:14.17.0 COPY ./ ./ diff --git a/frameworks/JavaScript/nodejs/README.md b/frameworks/JavaScript/nodejs/README.md index d31d3c4d8a1..19f9da5503f 100644 --- a/frameworks/JavaScript/nodejs/README.md +++ b/frameworks/JavaScript/nodejs/README.md @@ -16,7 +16,7 @@ The server is currently in Alpha state, but aims to be a drop-in replacement for ## Infrastructure Software Versions The tests were run with: -* [Node.js v14.16.0](http://nodejs.org/) +* [Node.js v14.16.1](http://nodejs.org/) * [Node MySQL 2.16.0](https://github.com/felixge/node-mysql/) * [Sequelize 5.15.1](https://github.com/sequelize/sequelize) diff --git a/frameworks/JavaScript/nodejs/nodejs.dockerfile b/frameworks/JavaScript/nodejs/nodejs.dockerfile index 977a95928f5..0b68f8f1d9c 100644 --- a/frameworks/JavaScript/nodejs/nodejs.dockerfile +++ b/frameworks/JavaScript/nodejs/nodejs.dockerfile @@ -1,4 +1,4 @@ -FROM node:14.16.0-slim +FROM node:14.16.1-slim ARG TFB_TEST_NAME diff --git a/frameworks/PHP/amp/amp.dockerfile b/frameworks/PHP/amp/amp.dockerfile index d77ac4b56f3..c26bd5c312f 100644 --- a/frameworks/PHP/amp/amp.dockerfile +++ b/frameworks/PHP/amp/amp.dockerfile @@ -1,24 +1,29 @@ -FROM ubuntu:20.04 +FROM ubuntu:21.04 ARG DEBIAN_FRONTEND=noninteractive RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /dev/null RUN LC_ALL=C.UTF-8 add-apt-repository ppa:ondrej/php RUN apt-get update -yqq > /dev/null && \ - apt-get install -yqq git unzip php7.4 php7.4-common php7.4-cli php7.4-dev php7.4-mbstring composer curl build-essential > /dev/null + apt-get install -yqq git unzip wget curl build-essential \ + php8.0-cli php8.0-mbstring php8.0-dev php8.0-xml php8.0-curl > /dev/null # An extension is required! # We deal with concurrencies over 1k, which stream_select doesn't support. 
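The Amp image switches from the `uv` extension to `ext-event` for exactly the reason the comment gives: PHP's `stream_select` backend cannot service the 1k+ concurrent connections the benchmark opens. As a purely illustrative sketch (not part of this change, and assuming it runs inside the container built by this Dockerfile), a worker script could fail fast when no event-loop extension is present:

```php
<?php
// Illustrative startup guard, not part of the benchmark. The extension names
// match what the Dockerfile installs via pecl (event) or previously used (uv).
if (!extension_loaded('event') && !extension_loaded('uv')) {
    fwrite(STDERR, "An event-loop extension (ext-event or ext-uv) is required; "
        . "stream_select cannot handle more than ~1k concurrent connections.\n");
    exit(1);
}
```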
- -RUN apt-get install -y php-pear php-dev libuv1-dev libuv1 > /dev/null -RUN pecl install uv-0.2.4 > /dev/null +RUN wget http://pear.php.net/go-pear.phar --quiet && php go-pear.phar +#RUN apt-get install -y libuv1-dev > /dev/null +RUN apt-get install -y libevent-dev > /dev/null +#RUN pecl install uv-0.2.4 > /dev/null && echo "extension=uv.so" > /etc/php/8.0/cli/conf.d/uv.ini +RUN pecl install event-3.0.4 > /dev/null && echo "extension=event.so" > /etc/php/8.0/cli/conf.d/event.ini ADD ./ /amp WORKDIR /amp -COPY deploy/conf/* /etc/php/7.4/cli/conf.d/ +COPY deploy/conf/* /etc/php/8.0/cli/conf.d/ + +COPY --from=composer:latest /usr/bin/composer /usr/local/bin/composer -RUN composer install --prefer-dist --optimize-autoloader --no-dev +RUN composer install --prefer-dist --optimize-autoloader --no-dev --quiet EXPOSE 8080 diff --git a/frameworks/PHP/amp/benchmark_config.json b/frameworks/PHP/amp/benchmark_config.json index 50878aaf9b3..a6cb27e7d59 100644 --- a/frameworks/PHP/amp/benchmark_config.json +++ b/frameworks/PHP/amp/benchmark_config.json @@ -4,7 +4,7 @@ "default": { "json_url": "/json", "db_url": "/db", - "query_url": "/queries?queries=", + "query_url": "/queries?q=", "fortune_url": "/fortunes", "plaintext_url": "/plaintext", "port": 8080, diff --git a/frameworks/PHP/amp/deploy/conf/php.ini b/frameworks/PHP/amp/deploy/conf/php.ini index 73ba16fcb26..68c42ed1722 100644 --- a/frameworks/PHP/amp/deploy/conf/php.ini +++ b/frameworks/PHP/amp/deploy/conf/php.ini @@ -1,6 +1,16 @@ -# Enable OPcache also for CLI scripts -opcache.enable_cli = On - # Disable assertions, because Amp uses them for debugging code zend.assertions = -1 -extension=uv.so + +opcache.enable=1 +opcache.enable_cli=1 +opcache.validate_timestamps=0 +opcache.save_comments=0 +opcache.enable_file_override=1 +opcache.huge_code_pages=1 + +mysqlnd.collect_statistics = Off + +memory_limit = 512M + +opcache.jit_buffer_size=128M +opcache.jit=tracing \ No newline at end of file diff --git a/frameworks/PHP/amp/fortunes.php b/frameworks/PHP/amp/fortunes.php index 33bc9a90b6a..d93f415f4b1 100644 --- a/frameworks/PHP/amp/fortunes.php +++ b/frameworks/PHP/amp/fortunes.php @@ -1,4 +1,4 @@ Fortunes $fortune): ?> - +
<tr><th>id</th><th>message</th></tr>
diff --git a/frameworks/PHP/amp/server.php b/frameworks/PHP/amp/server.php index b0eaee6a8a4..7c7c2c31aa0 100644 --- a/frameworks/PHP/amp/server.php +++ b/frameworks/PHP/amp/server.php @@ -90,7 +90,7 @@ private function doHandleRequest($request) { $query = $request->getUri()->getQuery(); \parse_str($query, $queryParams); - $queries = (int) ($queryParams['queries'] ?? 1); + $queries = (int) ($queryParams['q'] ?? 1); if ($queries < 1) { $queries = 1; } elseif ($queries > 500) { @@ -157,12 +157,6 @@ private function doHandleRequest($request) { ], \ob_get_clean()); } - private function execute($statement) { - $result = yield $statement->execute([mt_rand(1, 10000)]); - yield $result->advance(); - - return $result->getCurrent(); - } }); // Case 6 - Plaintext diff --git a/frameworks/PHP/comet/comet-mysql.dockerfile b/frameworks/PHP/comet/comet-mysql.dockerfile index 30b1d5c3e87..d78bdcce155 100644 --- a/frameworks/PHP/comet/comet-mysql.dockerfile +++ b/frameworks/PHP/comet/comet-mysql.dockerfile @@ -10,7 +10,7 @@ RUN apt-get update -yqq > /dev/null && \ RUN apt-get install -yqq composer > /dev/null RUN apt-get install -y php-pear php-dev libevent-dev > /dev/null -RUN pecl install event-3.0.2 > /dev/null && echo "extension=event.so" > /etc/php/8.0/cli/conf.d/event.ini +RUN pecl install event-3.0.4 > /dev/null && echo "extension=event.so" > /etc/php/8.0/cli/conf.d/event.ini COPY php.ini /etc/php/8.0/cli/php.ini diff --git a/frameworks/PHP/comet/comet.dockerfile b/frameworks/PHP/comet/comet.dockerfile index 472a3799829..d232e80d9a0 100644 --- a/frameworks/PHP/comet/comet.dockerfile +++ b/frameworks/PHP/comet/comet.dockerfile @@ -10,7 +10,7 @@ RUN apt-get update -yqq > /dev/null && \ RUN apt-get install -yqq composer > /dev/null RUN apt-get install -y php-pear php-dev libevent-dev > /dev/null -RUN pecl install event-3.0.2 > /dev/null && echo "extension=event.so" > /etc/php/8.0/cli/conf.d/event.ini +RUN pecl install event-3.0.4 > /dev/null && echo "extension=event.so" > /etc/php/8.0/cli/conf.d/event.ini COPY php.ini /etc/php/8.0/cli/php.ini diff --git a/frameworks/PHP/kumbiaphp/kumbiaphp-workerman-mysql.dockerfile b/frameworks/PHP/kumbiaphp/kumbiaphp-workerman-mysql.dockerfile index 69c6e7159de..2933d6e03bf 100644 --- a/frameworks/PHP/kumbiaphp/kumbiaphp-workerman-mysql.dockerfile +++ b/frameworks/PHP/kumbiaphp/kumbiaphp-workerman-mysql.dockerfile @@ -10,7 +10,7 @@ RUN apt-get update -yqq > /dev/null && \ RUN apt-get install -yqq composer > /dev/null RUN apt-get install -y php-pear php8.0-dev libevent-dev > /dev/null -RUN pecl install event-3.0.2 > /dev/null && echo "extension=event.so" > /etc/php/8.0/cli/conf.d/event.ini +RUN pecl install event-3.0.4 > /dev/null && echo "extension=event.so" > /etc/php/8.0/cli/conf.d/event.ini COPY deploy/conf/cliphp.ini /etc/php/8.0/cli/php.ini diff --git a/frameworks/PHP/kumbiaphp/kumbiaphp-workerman.dockerfile b/frameworks/PHP/kumbiaphp/kumbiaphp-workerman.dockerfile index 5b78089db1f..06663d1df3f 100644 --- a/frameworks/PHP/kumbiaphp/kumbiaphp-workerman.dockerfile +++ b/frameworks/PHP/kumbiaphp/kumbiaphp-workerman.dockerfile @@ -10,7 +10,7 @@ RUN apt-get update -yqq > /dev/null && \ RUN apt-get install -yqq composer > /dev/null RUN apt-get install -y php-pear php8.0-dev libevent-dev > /dev/null -RUN pecl install event-3.0.2 > /dev/null && echo "extension=event.so" > /etc/php/8.0/cli/conf.d/event.ini +RUN pecl install event-3.0.4 > /dev/null && echo "extension=event.so" > /etc/php/8.0/cli/conf.d/event.ini COPY deploy/conf/cliphp.ini /etc/php/8.0/cli/php.ini diff 
--git a/frameworks/PHP/laravel/.env b/frameworks/PHP/laravel/.env index 64206361956..3bed03b2b6d 100644 --- a/frameworks/PHP/laravel/.env +++ b/frameworks/PHP/laravel/.env @@ -4,7 +4,7 @@ APP_KEY=base64:JRW3D/imCqern1eNGYaRTmP8wixsi3gWRXTSIT1LGTQ= APP_DEBUG=false APP_URL=http://localhost -LOG_CHANNEL=stack +LOG_CHANNEL=stdout DB_CONNECTION=mysql DB_HOST=tfb-database diff --git a/frameworks/PHP/laravel/README.md b/frameworks/PHP/laravel/README.md index 445a6564b02..cf762a30253 100644 --- a/frameworks/PHP/laravel/README.md +++ b/frameworks/PHP/laravel/README.md @@ -74,7 +74,21 @@ Laravel-swoole is an adapter layer between Swoole and Laravel/Lumen. It provide Also because Laravel was written under php-fpm environment where the framework is reset between each request, sometimes state changes are not re-initialized between requests since it isn't necessary in an environment where the framework is terminated after each request. To handle this, Laravel-swoole creates a sandbox for each request with a copy of initial framework state so that any changes made by the request do not impact the state of other incoming requests. -Brion Finlay 10/3/2018 +benchmark support: [Brion Finlay](https://github.com/bfinlay) 10/3/2018 + +# laravel-roadrunner Benchmarking Test + +The laravel-roadrunner test is a benchmark test of Laravel running on [Roadrunner](https://github.com/spiral/roadrunner). + +RoadRunner is an open-source (MIT licensed) high-performance PHP application server, load balancer, and process manager. It supports running as a service with the ability to extend its functionality on a per-project basis. + +RoadRunner includes PSR-7/PSR-17 compatible HTTP and HTTP/2 server and can be used to replace classic Nginx+FPM setup with much greater performance and flexibility. + +RoadRunner achieves performance improvements by reusing PHP instances so that the framework is not bootstrapped on each request. + +RoadRunner uses a synchronous model of execution rather than asynchronous for broader compatibility with libraries. 
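To make the point about reused PHP instances concrete, here is a minimal sketch of a plain RoadRunner 2.x PSR-7 worker loop. It assumes the `spiral/roadrunner` and `nyholm/psr7` packages this test declares; the benchmark itself relies on the `spiral/roadrunner-laravel` bridge worker (`vendor/bin/rr-worker`), not a hand-written loop like this.

```php
<?php
// Sketch only: a generic RoadRunner PSR-7 worker, not the Laravel bridge worker
// used by the laravel-roadrunner test. Anything bootstrapped before the loop
// stays in memory and is reused for every request RoadRunner relays here.
use Nyholm\Psr7\Factory\Psr17Factory;
use Spiral\RoadRunner\Http\PSR7Worker;
use Spiral\RoadRunner\Worker;

require __DIR__ . '/vendor/autoload.php';

$factory = new Psr17Factory();
$worker  = new PSR7Worker(Worker::create(), $factory, $factory, $factory);

while ($request = $worker->waitRequest()) {
    $response = $factory->createResponse(200)->withHeader('Content-Type', 'text/plain');
    $response->getBody()->write('Hello, World!');
    $worker->respond($response);
}
```

Unlike php-fpm, nothing in this loop re-runs the framework bootstrap per request, which is where the throughput gain comes from.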
+ +benchmark support: [Brion Finlay](https://github.com/bfinlay) 4/16/2021 ### Test Type Implementation Source Code @@ -89,6 +103,8 @@ Brion Finlay 10/3/2018 The tests were run with: * [Swoole](https://www.swoole.co.uk/) * [laravel-swoole](https://github.com/swooletw/laravel-swoole/wiki) +* [Roadrunner](https://github.com/spiral/roadrunner) +* [Roadrunner Laravel Bridge](https://github.com/spiral/roadrunner-laravel) * [Laravel](https://laravel.com/) ## Test URLs diff --git a/frameworks/PHP/laravel/benchmark_config.json b/frameworks/PHP/laravel/benchmark_config.json index 082fe601161..d8543ef13f4 100644 --- a/frameworks/PHP/laravel/benchmark_config.json +++ b/frameworks/PHP/laravel/benchmark_config.json @@ -69,6 +69,29 @@ "display_name": "laravel-laravel-s", "notes": "", "versus": "swoole" + }, + "roadrunner": { + "json_url": "/json", + "db_url": "/db", + "query_url": "/queries/", + "fortune_url": "/fortunes", + "update_url": "/updates/", + "plaintext_url": "/plaintext", + "port": 8080, + "approach": "Realistic", + "classification": "Fullstack", + "database": "MySQL", + "framework": "laravel", + "language": "PHP", + "flavor": "None", + "orm": "Full", + "platform": "roadrunner", + "webserver": "none", + "os": "Linux", + "database_os": "Linux", + "display_name": "laravel-roadrunner", + "notes": "", + "versus": "swoole" } }] } \ No newline at end of file diff --git a/frameworks/PHP/laravel/composer.json b/frameworks/PHP/laravel/composer.json index c18073b4183..d7617db1cd1 100644 --- a/frameworks/PHP/laravel/composer.json +++ b/frameworks/PHP/laravel/composer.json @@ -12,7 +12,7 @@ }, "require-dev": { "facade/ignition": "^2.3.6", - "fzaninotto/faker": "^1.9.1", + "fakerphp/faker": "^1.9.1", "mockery/mockery": "^1.3.1", "nunomaduro/collision": "^5.0", "phpunit/phpunit": "^9.3" diff --git a/frameworks/PHP/laravel/config/logging.php b/frameworks/PHP/laravel/config/logging.php new file mode 100644 index 00000000000..5d889c62a51 --- /dev/null +++ b/frameworks/PHP/laravel/config/logging.php @@ -0,0 +1,55 @@ + env('LOG_CHANNEL', 'stdout'), + + /* + |-------------------------------------------------------------------------- + | Log Channels + |-------------------------------------------------------------------------- + | + | Here you may configure the log channels for your application. Out of + | the box, Laravel uses the Monolog PHP logging library. This gives + | you a variety of powerful log handlers / formatters to utilize. + | + | Available Drivers: "single", "daily", "slack", "syslog", + | "errorlog", "monolog", + | "custom", "stack" + | + */ + + 'channels' => [ + 'stdout' => [ + 'driver' => 'monolog', + 'handler' => StreamHandler::class, + 'formatter' => env('LOG_STDERR_FORMATTER'), + 'with' => [ + 'stream' => 'php://stdout', + ], + ], + ], + +]; diff --git a/frameworks/PHP/laravel/deploy/conf/php-fpm.conf b/frameworks/PHP/laravel/deploy/conf/php-fpm.conf index 708b2d297cf..d3472a7214d 100644 --- a/frameworks/PHP/laravel/deploy/conf/php-fpm.conf +++ b/frameworks/PHP/laravel/deploy/conf/php-fpm.conf @@ -22,7 +22,8 @@ pid = /run/php/php-fpm.pid ; Note: the default prefix is /var ; Default Value: log/php-fpm.log ;error_log = /var/log/php7.3-fpm.log -error_log = /dev/stderr +;error_log = /dev/stderr +error_log = /proc/self/fd/2 ; syslog_facility is used to specify what type of program is logging the @@ -385,6 +386,7 @@ pm.max_spare_servers = 512 ; The access log file ; Default: not set ;access.log = log/$pool.access.log +;access.log = /proc/self/fd/2 ; The access log format. 
; The following syntax is allowed @@ -497,7 +499,7 @@ pm.max_spare_servers = 512 ; Note: on highloaded environement, this can cause some delay in the page ; process time (several ms). ; Default Value: no -;catch_workers_output = yes +; catch_workers_output = yes ; Clear environment in FPM workers ; Prevents arbitrary environment variables from reaching FPM worker processes @@ -548,4 +550,5 @@ pm.max_spare_servers = 512 ;php_flag[display_errors] = off ;php_admin_value[error_log] = /var/log/fpm-php.www.log ;php_admin_flag[log_errors] = on -;php_admin_value[memory_limit] = 32M \ No newline at end of file +;php_admin_value[memory_limit] = 32M + diff --git a/frameworks/PHP/laravel/deploy/roadrunner/.rr.yaml b/frameworks/PHP/laravel/deploy/roadrunner/.rr.yaml new file mode 100644 index 00000000000..37ad105b528 --- /dev/null +++ b/frameworks/PHP/laravel/deploy/roadrunner/.rr.yaml @@ -0,0 +1,22 @@ +# see https://roadrunner.dev/docs/intro-config +server: + command: "php ./vendor/bin/rr-worker start --relay-dsn unix:///usr/local/var/run/rr-rpc.sock" + relay: "unix:///usr/local/var/run/rr-rpc.sock" +logs: + mode: production + level: error +http: + address: 0.0.0.0:8080 + middleware: ["headers", "static", "gzip"] + pool: + max_jobs: 64 # feel free to change this + supervisor: + exec_ttl: 60s + headers: + response: + Server: "RoadRunner" + static: + dir: "public" + forbid: [".php"] + + diff --git a/frameworks/PHP/laravel/deploy/roadrunner/composer.json b/frameworks/PHP/laravel/deploy/roadrunner/composer.json new file mode 100644 index 00000000000..5204786e4d6 --- /dev/null +++ b/frameworks/PHP/laravel/deploy/roadrunner/composer.json @@ -0,0 +1,57 @@ +{ + "name": "laravel/laravel", + "type": "project", + "description": "The Laravel Framework.", + "keywords": [ + "framework", + "laravel" + ], + "license": "MIT", + "require": { + "laravel/framework": "^8.16", + "nyholm/psr7": "*", + "spiral/roadrunner": "^2.0", + "spiral/roadrunner-laravel": "^4.0" + }, + "require-dev": { + "facade/ignition": "^2.3.6", + "fakerphp/faker": "^1.9.1", + "mockery/mockery": "^1.3.1", + "nunomaduro/collision": "^5.0", + "phpunit/phpunit": "^9.3" + }, + "config": { + "optimize-autoloader": true, + "preferred-install": "dist", + "sort-packages": true + }, + "extra": { + "laravel": { + "dont-discover": [] + } + }, + "autoload": { + "psr-4": { + "App\\": "app/" + } + }, + "autoload-dev": { + "psr-4": { + "Tests\\": "tests/" + } + }, + "minimum-stability": "dev", + "prefer-stable": true, + "scripts": { + "post-autoload-dump": [ + "Illuminate\\Foundation\\ComposerScripts::postAutoloadDump", + "@php artisan package:discover --ansi" + ], + "post-root-package-install": [ + "@php -r \"file_exists('.env') || copy('.env.example', '.env');\"" + ], + "post-create-project-cmd": [ + "@php artisan key:generate --ansi" + ] + } +} diff --git a/frameworks/PHP/laravel/deploy/roadrunner/roadrunner-2.0.4-linux-amd64.tar.gz b/frameworks/PHP/laravel/deploy/roadrunner/roadrunner-2.0.4-linux-amd64.tar.gz new file mode 100644 index 00000000000..69c83fe9002 Binary files /dev/null and b/frameworks/PHP/laravel/deploy/roadrunner/roadrunner-2.0.4-linux-amd64.tar.gz differ diff --git a/frameworks/PHP/laravel/deploy/swoole/composer.json b/frameworks/PHP/laravel/deploy/swoole/composer.json index dd7d8a6d8d5..262f2e0ba33 100644 --- a/frameworks/PHP/laravel/deploy/swoole/composer.json +++ b/frameworks/PHP/laravel/deploy/swoole/composer.json @@ -8,13 +8,12 @@ ], "license": "MIT", "require": { - "php": "^7.3", "laravel/framework": "^8.0", "swooletw/laravel-swoole": 
"^v2.6" }, "require-dev": { "facade/ignition": "^2.3.6", - "fzaninotto/faker": "^1.9.1", + "fakerphp/faker": "^1.9.1", "mockery/mockery": "^1.3.1", "nunomaduro/collision": "^5.0", "phpunit/phpunit": "^9.3" diff --git a/frameworks/PHP/laravel/deploy/swoole/install-composer.sh b/frameworks/PHP/laravel/deploy/swoole/install-composer.sh deleted file mode 100644 index f8e743d6a13..00000000000 --- a/frameworks/PHP/laravel/deploy/swoole/install-composer.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/sh - -EXPECTED_SIGNATURE="$(curl -s https://composer.github.io/installer.sig)" -php -r "copy('https://getcomposer.org/installer', 'composer-setup.php');" -ACTUAL_SIGNATURE="$(php -r "echo hash_file('SHA384', 'composer-setup.php');")" - -if [ "$EXPECTED_SIGNATURE" != "$ACTUAL_SIGNATURE" ] -then - >&2 echo 'ERROR: Invalid installer signature' - rm composer-setup.php - exit 1 -fi - -php composer-setup.php --quiet -RESULT=$? -rm composer-setup.php -exit $RESULT \ No newline at end of file diff --git a/frameworks/PHP/laravel/deploy/swoole/php.ini b/frameworks/PHP/laravel/deploy/swoole/php.ini deleted file mode 100644 index 3cf51cccca9..00000000000 --- a/frameworks/PHP/laravel/deploy/swoole/php.ini +++ /dev/null @@ -1,2 +0,0 @@ -opcache.enable_cli=1 -opcache.validate_timestamps=0 diff --git a/frameworks/PHP/laravel/laravel-laravel-s.dockerfile b/frameworks/PHP/laravel/laravel-laravel-s.dockerfile index bd117766b80..cafea786c1c 100644 --- a/frameworks/PHP/laravel/laravel-laravel-s.dockerfile +++ b/frameworks/PHP/laravel/laravel-laravel-s.dockerfile @@ -16,7 +16,7 @@ RUN chmod -R 777 /laravel RUN apt-get update > /dev/null && \ apt-get install -yqq git unzip > /dev/null -RUN php -r "copy('https://install.phpcomposer.com/installer', 'composer-setup.php');" && php composer-setup.php && php -r "unlink('composer-setup.php');" +RUN php -r "copy('https://getcomposer.org/installer', 'composer-setup.php');" && php composer-setup.php && php -r "unlink('composer-setup.php');" RUN mv composer.phar /usr/local/bin/composer COPY deploy/laravel-s/composer.json ./ diff --git a/frameworks/PHP/laravel/laravel-roadrunner.dockerfile b/frameworks/PHP/laravel/laravel-roadrunner.dockerfile new file mode 100644 index 00000000000..017a803461f --- /dev/null +++ b/frameworks/PHP/laravel/laravel-roadrunner.dockerfile @@ -0,0 +1,34 @@ +FROM php:8.0-cli + +RUN docker-php-ext-install pdo_mysql pcntl opcache sockets > /dev/null + +RUN echo "opcache.enable_cli=1" >> /usr/local/etc/php/conf.d/docker-php-ext-opcache.ini +#RUN echo "opcache.jit=1205" >> /usr/local/etc/php/conf.d/docker-php-ext-opcache.ini +#RUN echo "opcache.jit_buffer_size=128M" >> /usr/local/etc/php/conf.d/docker-php-ext-opcache.ini + +ADD ./ /laravel +WORKDIR /laravel + +RUN mkdir -p /laravel/bootstrap/cache /laravel/storage/logs /laravel/storage/framework/sessions /laravel/storage/framework/views /laravel/storage/framework/cache +RUN chmod -R 777 /laravel + +RUN apt-get update > /dev/null && \ + apt-get install -yqq git unzip > /dev/null +RUN php -r "copy('https://getcomposer.org/installer', 'composer-setup.php');" && php composer-setup.php && php -r "unlink('composer-setup.php');" +RUN mv composer.phar /usr/local/bin/composer + +COPY deploy/roadrunner/composer.json ./ +COPY deploy/roadrunner/.rr.yaml ./ + +RUN composer install -a --no-dev --quiet +RUN php artisan optimize + +# `./vendor/bin/rr get-binary` is github rate-limited +RUN tar xzf deploy/roadrunner/roadrunner-*.tar.gz && mv roadrunner-*/rr . 
&& chmod +x ./rr +RUN php artisan vendor:publish --provider='Spiral\RoadRunnerLaravel\ServiceProvider' --tag=config + +EXPOSE 8080 + +# CMD bash +CMD ./rr serve -c ./.rr.yaml + diff --git a/frameworks/PHP/laravel/laravel-swoole.dockerfile b/frameworks/PHP/laravel/laravel-swoole.dockerfile index 263f6b6b3d8..7f16ac17f71 100644 --- a/frameworks/PHP/laravel/laravel-swoole.dockerfile +++ b/frameworks/PHP/laravel/laravel-swoole.dockerfile @@ -1,37 +1,32 @@ -FROM php:7.4 +FROM php:8.0-cli RUN pecl install swoole > /dev/null && \ docker-php-ext-enable swoole +RUN docker-php-ext-install pdo_mysql pcntl opcache > /dev/null -RUN docker-php-ext-install pdo_mysql > /dev/null +RUN echo "opcache.enable_cli=1" >> /usr/local/etc/php/conf.d/docker-php-ext-opcache.ini +#RUN echo "opcache.jit=1205" >> /usr/local/etc/php/conf.d/docker-php-ext-opcache.ini +#RUN echo "opcache.jit_buffer_size=128M" >> /usr/local/etc/php/conf.d/docker-php-ext-opcache.ini ADD ./ /laravel WORKDIR /laravel -COPY deploy/swoole/php.ini /usr/local/etc/php/ - -RUN mkdir -p /laravel/bootstrap/cache -RUN mkdir -p /laravel/storage/framework/sessions -RUN mkdir -p /laravel/storage/framework/views -RUN mkdir -p /laravel/storage/framework/cache +RUN mkdir -p /laravel/bootstrap/cache /laravel/storage/framework/sessions /laravel/storage/framework/views /laravel/storage/framework/cache RUN chmod -R 777 /laravel -RUN echo "APP_SWOOLE=true" >> .env - -# Install composer using the installation method documented at https://getcomposer.org/doc/faqs/how-to-install-composer-programmatically.md -# This method was chosen because composer is not part of the apt repositories that are in the default PHP 7.4 docker image -# Adding alternate apt php repos can potentially cause problems with extension compatibility between the php build from the docker image and the alternate php build -# An additional benefit of this method is that the correct version of composer will be used for the environment and version of the php system in the docker image -RUN deploy/swoole/install-composer.sh - RUN apt-get update -yqq > /dev/null && \ apt-get install -yqq git unzip > /dev/null -COPY deploy/swoole/composer* ./ -RUN php composer.phar install -a --no-dev --quiet +RUN php -r "copy('https://getcomposer.org/installer', 'composer-setup.php');" && php composer-setup.php && php -r "unlink('composer-setup.php');" +RUN mv composer.phar /usr/local/bin/composer + +COPY deploy/swoole/composer.json ./ + +RUN echo "APP_SWOOLE=true" >> .env + +RUN composer install -a --no-dev --quiet RUN php artisan optimize -RUN chmod -R 777 /laravel EXPOSE 8080 diff --git a/frameworks/PHP/laravel/laravel.dockerfile b/frameworks/PHP/laravel/laravel.dockerfile index 06a32025277..3c75a2d4b27 100644 --- a/frameworks/PHP/laravel/laravel.dockerfile +++ b/frameworks/PHP/laravel/laravel.dockerfile @@ -18,20 +18,17 @@ WORKDIR /laravel RUN if [ $(nproc) = 2 ]; then sed -i "s|pm.max_children = 1024|pm.max_children = 512|g" /etc/php/8.0/fpm/php-fpm.conf ; fi; -RUN mkdir -p /laravel/bootstrap/cache -RUN mkdir -p /laravel/storage/framework/sessions -RUN mkdir -p /laravel/storage/framework/views -RUN mkdir -p /laravel/storage/framework/cache - +RUN mkdir -p /laravel/bootstrap/cache /laravel/storage/logs /laravel/storage/framework/sessions /laravel/storage/framework/views /laravel/storage/framework/cache RUN chmod -R 777 /laravel RUN composer install --optimize-autoloader --classmap-authoritative --no-dev --quiet - RUN php artisan optimize -RUN chmod -R 777 /laravel - EXPOSE 8080 -CMD service php8.0-fpm start && \ - 
nginx -c /laravel/deploy/nginx.conf +# Uncomment next line for Laravel console error logging to be viewable in docker logs +# RUN echo "catch_workers_output = yes" >> /etc/php/8.0/fpm/php-fpm.conf + +RUN mkdir -p /run/php +CMD /usr/sbin/php-fpm8.0 --fpm-config /etc/php/8.0/fpm/php-fpm.conf && nginx -c /laravel/deploy/nginx.conf + diff --git a/frameworks/PHP/php-ngx/benchmark_config.json b/frameworks/PHP/php-ngx/benchmark_config.json index d5624e9983f..dae6c4a1b21 100644 --- a/frameworks/PHP/php-ngx/benchmark_config.json +++ b/frameworks/PHP/php-ngx/benchmark_config.json @@ -10,7 +10,7 @@ "database": "None", "framework": "php-ngx", "language": "PHP", - "flavor": "PHP7", + "flavor": "PHP8 JIT", "orm": "Raw", "platform": "ngx_php", "webserver": "nginx", @@ -31,7 +31,7 @@ "database": "MySQL", "framework": "php-ngx", "language": "PHP", - "flavor": "PHP7", + "flavor": "PHP8 JIT", "orm": "Raw", "platform": "ngx_php", "webserver": "nginx", @@ -52,7 +52,7 @@ "database": "Postgres", "framework": "php-ngx", "language": "PHP", - "flavor": "PHP7", + "flavor": "PHP8 JIT", "orm": "Raw", "platform": "ngx_php", "webserver": "nginx", @@ -73,7 +73,7 @@ "database": "MySQL", "framework": "php-ngx", "language": "PHP", - "flavor": "PHP7", + "flavor": "PHP8 JIT", "orm": "Raw", "platform": "ngx_php", "webserver": "nginx", diff --git a/frameworks/PHP/php-ngx/deploy/conf/php.ini b/frameworks/PHP/php-ngx/deploy/conf/php.ini index 322e3070404..e0e979da56e 100644 --- a/frameworks/PHP/php-ngx/deploy/conf/php.ini +++ b/frameworks/PHP/php-ngx/deploy/conf/php.ini @@ -1908,4 +1908,7 @@ opcache.huge_code_pages=1 ; Local Variables: ; tab-width: 4 -; End: \ No newline at end of file +; End: + +opcache.jit_buffer_size=128M +opcache.jit=tracing diff --git a/frameworks/PHP/php-ngx/php-ngx-async.dockerfile b/frameworks/PHP/php-ngx/php-ngx-async.dockerfile index cfa6f8e023a..e9552f71d83 100644 --- a/frameworks/PHP/php-ngx/php-ngx-async.dockerfile +++ b/frameworks/PHP/php-ngx/php-ngx-async.dockerfile @@ -11,7 +11,7 @@ RUN apt-get update -yqq > /dev/null && \ ADD ./ ./ -ENV NGINX_VERSION 1.19.7 +ENV NGINX_VERSION 1.20.0 RUN git clone -b v0.0.25 --single-branch --depth 1 https://github.com/rryqszq4/ngx_php7.git > /dev/null diff --git a/frameworks/PHP/php-ngx/php-ngx-mysql.dockerfile b/frameworks/PHP/php-ngx/php-ngx-mysql.dockerfile index 6745bace617..6e7d7c778d6 100644 --- a/frameworks/PHP/php-ngx/php-ngx-mysql.dockerfile +++ b/frameworks/PHP/php-ngx/php-ngx-mysql.dockerfile @@ -11,7 +11,7 @@ RUN apt-get update -yqq > /dev/null && \ ADD ./ ./ -ENV NGINX_VERSION 1.19.7 +ENV NGINX_VERSION 1.20.0 RUN git clone -b v0.0.25 --single-branch --depth 1 https://github.com/rryqszq4/ngx_php7.git > /dev/null diff --git a/frameworks/PHP/php-ngx/php-ngx-pgsql.dockerfile b/frameworks/PHP/php-ngx/php-ngx-pgsql.dockerfile index 5f7c485ffe9..59b545f016e 100644 --- a/frameworks/PHP/php-ngx/php-ngx-pgsql.dockerfile +++ b/frameworks/PHP/php-ngx/php-ngx-pgsql.dockerfile @@ -11,7 +11,7 @@ RUN apt-get update -yqq > /dev/null && \ ADD ./ ./ -ENV NGINX_VERSION 1.19.7 +ENV NGINX_VERSION 1.20.0 RUN git clone -b v0.0.25 --single-branch --depth 1 https://github.com/rryqszq4/ngx_php7.git > /dev/null diff --git a/frameworks/PHP/php-ngx/php-ngx.dockerfile b/frameworks/PHP/php-ngx/php-ngx.dockerfile index e6d5762fbf7..1d2fd369c14 100644 --- a/frameworks/PHP/php-ngx/php-ngx.dockerfile +++ b/frameworks/PHP/php-ngx/php-ngx.dockerfile @@ -10,7 +10,7 @@ RUN apt-get update -yqq > /dev/null && \ php8.0-cli php8.0-dev libphp8.0-embed php8.0-mysql nginx > /dev/null ADD ./ ./ 
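Several PHP configs touched in this change (php-ngx here, amp and reactphp elsewhere in the diff) move to a "PHP8 JIT" flavor with `opcache.jit=tracing` and `opcache.jit_buffer_size=128M`. A quick, illustrative way to confirm the tracing JIT actually engages under those settings (assuming `opcache.enable_cli=1`, as these configs set) is to inspect OPcache's status:

```php
<?php
// Illustrative check, not part of the benchmark: inspect OPcache's JIT state.
// Assumes the php.ini settings from this change (opcache.enable_cli=1,
// opcache.jit=tracing, opcache.jit_buffer_size=128M).
$status = opcache_get_status(false);

var_dump(
    $status['jit']['enabled'] ?? null,     // JIT support compiled in and enabled
    $status['jit']['on'] ?? null,          // JIT active for this SAPI
    $status['jit']['buffer_size'] ?? null  // should reflect the 128M buffer
);
```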
-ENV NGINX_VERSION 1.19.7 +ENV NGINX_VERSION 1.20.0 RUN git clone -b v0.0.25 --single-branch --depth 1 https://github.com/rryqszq4/ngx_php7.git > /dev/null diff --git a/frameworks/PHP/reactphp/README.md b/frameworks/PHP/reactphp/README.md new file mode 100644 index 00000000000..ec3a45d6bdd --- /dev/null +++ b/frameworks/PHP/reactphp/README.md @@ -0,0 +1,13 @@ +# ReactPH Framework Benchmarking Test + +This is the [ReactPHP Framework](https://reactphp.org/) Event-driven, non-blocking I/O with PHP. + +## Test URLs + +### JSON Encoding Test + +http://localhost:8080/json + +### Plaintext Test + +http://localhost:8080/plaintext diff --git a/frameworks/PHP/reactphp/app.php b/frameworks/PHP/reactphp/app.php new file mode 100644 index 00000000000..7c4e230e10f --- /dev/null +++ b/frameworks/PHP/reactphp/app.php @@ -0,0 +1,141 @@ + PDO::FETCH_ASSOC, + PDO::ATTR_EMULATE_PREPARES => false + ] + ); + $world = $pdo->prepare('SELECT id,randomNumber FROM World WHERE id=?'); + $update = $pdo->prepare('UPDATE World SET randomNumber=? WHERE id=?'); + $fortune = $pdo->prepare('SELECT id,message FROM Fortune'); + $fortune->setFetchMode(PDO::FETCH_KEY_PAIR); +} + +function router(Request $request) +{ + return match($request->getUri()->getPath()) { + '/plaintext' => text(), + '/json' => json(), + '/db' => db(), + '/fortunes' => fortune(), + '/query' => query($request), + '/update' => updateraw($request), + // '/info' => info(), + default => new Response(404, [], 'Error 404'), + }; +} + +function text() +{ + return new Response(200, [ + 'Content-Type' => 'text/plain' + ], 'Hello, World!'); +} + +function json() +{ + return new Response(200, [ + 'Content-Type' => 'application/json' + ], json_encode(['message' => 'Hello, World!'])); +} + +function db() +{ + global $world; + + $world->execute([mt_rand(1, 10000)]); + + return new Response(200, [ + 'Content-Type' => 'application/json' + ], json_encode($world->fetch())); +} + +function query($request) +{ + global $world; + + $query_count = 1; + $q = (int) $request->getQueryParams()['q']; + if ($q > 1) { + $query_count = min($q, 500); + } + + while ($query_count--) { + $world->execute([mt_rand(1, 10000)]); + $arr[] = $world->fetch(); + } + + return new Response(200, [ + 'Content-Type' => 'application/json' + ], json_encode($arr)); +} + +function updateraw($request) +{ + global $world, $update; + + $query_count = 1; + $q = (int) $request->getQueryParams()['q']; + if ($q > 1) { + $query_count = min($q, 500); + } + + while ($query_count--) { + $id = mt_rand(1, 10000); + $world->execute([$id]); + $item = $world->fetch(); + $update->execute( + [$item['randomNumber'] = mt_rand(1, 10000), $id] + ); + + $arr[] = $item; + } + + // $pdo->beginTransaction(); + // foreach($arr as $world) { + // $update->execute([$world['randomNumber'], $world['id']]); + // } + // $pdo->commit(); + return new Response(200, [ + 'Content-Type' => 'application/json' + ], json_encode($arr)); +} + +function fortune() +{ + global $fortune; + + $fortune->execute(); + + $arr = $fortune->fetchAll(); + $arr[0] = 'Additional fortune added at request time.'; + asort($arr); + + $html = ''; + foreach ($arr as $id => $message) { + $message = htmlspecialchars($message, ENT_QUOTES, 'UTF-8'); + $html .= "$id$message"; + } + + return new Response(200, [ + 'Content-Type' => 'text/html; charset=UTF-8', + ], "Fortunes$html
<tr><th>id</th><th>message</th></tr>
" + ); +} + +/* function info() +{ + ob_start(); + phpinfo(); + return new Response(200, ['Content-Type' => 'text/plain'], ob_get_clean()); +} + */ \ No newline at end of file diff --git a/frameworks/PHP/reactphp/benchmark_config.json b/frameworks/PHP/reactphp/benchmark_config.json new file mode 100644 index 00000000000..e36878c84ed --- /dev/null +++ b/frameworks/PHP/reactphp/benchmark_config.json @@ -0,0 +1,28 @@ +{ + "framework": "reactphp", + "tests": [{ + "default": { + "json_url": "/json", + "db_url": "/db", + "query_url": "/query?q=", + "fortune_url": "/fortunes", + "update_url": "/update?q=", + "plaintext_url": "/plaintext", + "port": 8080, + "approach": "Realistic", + "classification": "Platform", + "framework": "reactphp", + "language": "PHP", + "flavor": "PHP7", + "database": "MySQL", + "orm": "Raw", + "platform": "reactphp", + "webserver": "None", + "os": "Linux", + "database_os": "Linux", + "display_name": "reactphp", + "notes": "", + "versus": "php" + } + }] +} diff --git a/frameworks/PHP/reactphp/composer.json b/frameworks/PHP/reactphp/composer.json new file mode 100644 index 00000000000..3c932547811 --- /dev/null +++ b/frameworks/PHP/reactphp/composer.json @@ -0,0 +1,9 @@ +{ + "require": { + "php": ">=5.3.0", + "psr/http-message": "^1.0", + "react/event-loop": "^1.1", + "react/http": "^1.3", + "react/socket": "^1.6" + } +} diff --git a/frameworks/PHP/reactphp/config.toml b/frameworks/PHP/reactphp/config.toml new file mode 100644 index 00000000000..f6eb6194616 --- /dev/null +++ b/frameworks/PHP/reactphp/config.toml @@ -0,0 +1,19 @@ +[framework] +name = "reactphp" + +[main] +urls.plaintext = "/plaintext" +urls.json = "/json" +urls.db = "/db" +urls.query = "/query?q=" +urls.update = "/update?q=" +urls.fortune = "/fortunes" +approach = "Realistic" +classification = "Platform" +database = "MySQL" +database_os = "Linux" +os = "Linux" +orm = "Raw" +platform = "reactphp" +webserver = "None" +versus = "php" diff --git a/frameworks/PHP/reactphp/deploy/conf/php.ini b/frameworks/PHP/reactphp/deploy/conf/php.ini new file mode 100644 index 00000000000..f0c616f9fb2 --- /dev/null +++ b/frameworks/PHP/reactphp/deploy/conf/php.ini @@ -0,0 +1,13 @@ +opcache.enable=1 +opcache.enable_cli=1 +opcache.validate_timestamps=0 +opcache.save_comments=0 +opcache.enable_file_override=1 +opcache.huge_code_pages=1 + +mysqlnd.collect_statistics = Off + +memory_limit = 512M + +opcache.jit_buffer_size=128M +opcache.jit=tracing diff --git a/frameworks/PHP/reactphp/reactphp.dockerfile b/frameworks/PHP/reactphp/reactphp.dockerfile new file mode 100644 index 00000000000..eea73e1d220 --- /dev/null +++ b/frameworks/PHP/reactphp/reactphp.dockerfile @@ -0,0 +1,30 @@ +FROM ubuntu:21.04 + +ARG DEBIAN_FRONTEND=noninteractive + +RUN apt-get update -yqq && apt-get install -yqq software-properties-common > /dev/null +RUN LC_ALL=C.UTF-8 add-apt-repository ppa:ondrej/php +RUN apt-get update -yqq > /dev/null && \ + apt-get install -yqq git unzip wget curl build-essential \ + php8.0-cli php8.0-mbstring php8.0-dev php8.0-xml php8.0-curl php8.0-mysql > /dev/null + +# An extension is required! +# We deal with concurrencies over 1k, which stream_select doesn't support. 
+RUN wget http://pear.php.net/go-pear.phar --quiet && php go-pear.phar +#RUN apt-get install -y libuv1-dev > /dev/null +RUN apt-get install -y libevent-dev > /dev/null +#RUN pecl install uv-0.2.4 > /dev/null && echo "extension=uv.so" > /etc/php/8.0/cli/conf.d/uv.ini +RUN pecl install event-3.0.4 > /dev/null && echo "extension=event.so" > /etc/php/8.0/cli/conf.d/event.ini + +ADD ./ /reactphp +WORKDIR /reactphp + +COPY deploy/conf/* /etc/php/8.0/cli/conf.d/ + +COPY --from=composer:latest /usr/bin/composer /usr/local/bin/composer + +RUN composer install --prefer-dist --optimize-autoloader --no-dev --quiet + +EXPOSE 8080 + +CMD php server.php diff --git a/frameworks/PHP/reactphp/server.php b/frameworks/PHP/reactphp/server.php new file mode 100644 index 00000000000..71bb725715d --- /dev/null +++ b/frameworks/PHP/reactphp/server.php @@ -0,0 +1,19 @@ +listen($socket); + +echo "React Server running at http://0.0.0.0:8080\n"; + +$loop->run(); diff --git a/frameworks/PHP/webman/composer.json b/frameworks/PHP/webman/composer.json index e3ebfa26b7f..fca70845573 100644 --- a/frameworks/PHP/webman/composer.json +++ b/frameworks/PHP/webman/composer.json @@ -25,9 +25,9 @@ }, "require": { "php": ">=7.2", - "workerman/webman-framework": "dev-master", + "workerman/webman-framework": "^1.0", "monolog/monolog": "^2.0", - "vlucas/phpdotenv": "^4.1" + "vlucas/phpdotenv": ">=4.1,<6.0" }, "suggest": { "ext-event": "For better performance. " diff --git a/frameworks/PHP/webman/webman.dockerfile b/frameworks/PHP/webman/webman.dockerfile index 04b8cfec343..556c507fd09 100644 --- a/frameworks/PHP/webman/webman.dockerfile +++ b/frameworks/PHP/webman/webman.dockerfile @@ -10,7 +10,7 @@ RUN apt-get update -yqq > /dev/null && \ RUN apt-get install -yqq composer > /dev/null RUN apt-get install -y php-pear php8.0-dev libevent-dev > /dev/null -RUN pecl install event-3.0.2 > /dev/null && echo "extension=event.so" > /etc/php/8.0/cli/conf.d/event.ini +RUN pecl install event-3.0.4 > /dev/null && echo "extension=event.so" > /etc/php/8.0/cli/conf.d/event.ini COPY php.ini /etc/php/8.0/cli/php.ini diff --git a/frameworks/PHP/workerman/benchmark_config.json b/frameworks/PHP/workerman/benchmark_config.json index 755d29d0031..68ce09ced79 100644 --- a/frameworks/PHP/workerman/benchmark_config.json +++ b/frameworks/PHP/workerman/benchmark_config.json @@ -62,7 +62,10 @@ "database_os": "Linux", "display_name": "workerman-async-db", "notes": "", - "versus": "php" + "versus": "php", + "tags": [ + "broken" + ] }, "php8-jit": { "json_url": "/json", diff --git a/frameworks/PHP/workerman/workerman-pgsql.dockerfile b/frameworks/PHP/workerman/workerman-pgsql.dockerfile index e2ce3cc5dd4..4822f351ba8 100644 --- a/frameworks/PHP/workerman/workerman-pgsql.dockerfile +++ b/frameworks/PHP/workerman/workerman-pgsql.dockerfile @@ -10,7 +10,7 @@ RUN apt-get update -yqq > /dev/null && \ RUN apt-get install -yqq composer > /dev/null RUN apt-get install -y php-pear php8.0-dev libevent-dev > /dev/null -RUN pecl install event-3.0.2 > /dev/null && echo "extension=event.so" > /etc/php/8.0/cli/conf.d/event.ini +RUN pecl install event-3.0.4 > /dev/null && echo "extension=event.so" > /etc/php/8.0/cli/conf.d/event.ini COPY php.ini /etc/php/8.0/cli/php.ini diff --git a/frameworks/PHP/workerman/workerman-php8-jit.dockerfile b/frameworks/PHP/workerman/workerman-php8-jit.dockerfile index 693345cb25d..d49e2232813 100644 --- a/frameworks/PHP/workerman/workerman-php8-jit.dockerfile +++ b/frameworks/PHP/workerman/workerman-php8-jit.dockerfile @@ -10,7 +10,7 @@ RUN apt-get 
update -yqq > /dev/null && \ RUN apt-get install -yqq composer > /dev/null RUN apt-get install -y php-pear php8.0-dev libevent-dev > /dev/null -RUN pecl install event-3.0.2 > /dev/null && echo "extension=event.so" > /etc/php/8.0/cli/conf.d/event.ini +RUN pecl install event-3.0.4 > /dev/null && echo "extension=event.so" > /etc/php/8.0/cli/conf.d/event.ini COPY php-jit.ini /etc/php/8.0/cli/php.ini diff --git a/frameworks/PHP/workerman/workerman.dockerfile b/frameworks/PHP/workerman/workerman.dockerfile index a6c519eb0d0..1f497d6a8d9 100644 --- a/frameworks/PHP/workerman/workerman.dockerfile +++ b/frameworks/PHP/workerman/workerman.dockerfile @@ -10,7 +10,7 @@ RUN apt-get update -yqq > /dev/null && \ RUN apt-get install -yqq composer > /dev/null RUN apt-get install -y php-pear php8.0-dev libevent-dev > /dev/null -RUN pecl install event-3.0.2 > /dev/null && echo "extension=event.so" > /etc/php/8.0/cli/conf.d/event.ini +RUN pecl install event-3.0.4 > /dev/null && echo "extension=event.so" > /etc/php/8.0/cli/conf.d/event.ini COPY php.ini /etc/php/8.0/cli/php.ini diff --git a/frameworks/Python/bottle/requirements-pypy.txt b/frameworks/Python/bottle/requirements-pypy.txt index dd9e99a2af2..3823281ebf1 100644 --- a/frameworks/Python/bottle/requirements-pypy.txt +++ b/frameworks/Python/bottle/requirements-pypy.txt @@ -1,4 +1,4 @@ -bottle==0.12.13 +bottle==0.12.19 bottle-sqlalchemy==0.4.3 gunicorn==19.9.0 PyMySQL==0.8.0 diff --git a/frameworks/Python/bottle/requirements.txt b/frameworks/Python/bottle/requirements.txt index d691d48f48f..27ae6549a77 100644 --- a/frameworks/Python/bottle/requirements.txt +++ b/frameworks/Python/bottle/requirements.txt @@ -1,4 +1,4 @@ -bottle==0.12.16 +bottle==0.12.19 bottle-sqlalchemy==0.4.3 greenlet==0.4.14 gunicorn==19.9.0 diff --git a/frameworks/Python/responder/requirements.txt b/frameworks/Python/responder/requirements.txt index 348fabc2352..d739e282fe3 100644 --- a/frameworks/Python/responder/requirements.txt +++ b/frameworks/Python/responder/requirements.txt @@ -1,4 +1,4 @@ -asyncpg==0.20.1 +asyncpg==0.21.0 gunicorn==20.0.4 Jinja2==2.11.3 ujson==2.0.3 diff --git a/frameworks/Python/starlette/requirements.txt b/frameworks/Python/starlette/requirements.txt index 700843dcc41..087f0ef57b4 100644 --- a/frameworks/Python/starlette/requirements.txt +++ b/frameworks/Python/starlette/requirements.txt @@ -1,4 +1,4 @@ -asyncpg==0.20.1 +asyncpg==0.21.0 gunicorn==20.0.4 Jinja2==2.11.3 ujson==2.0.3 diff --git a/frameworks/Python/uvicorn/requirements.txt b/frameworks/Python/uvicorn/requirements.txt index 87f457e6a58..51e7b091185 100644 --- a/frameworks/Python/uvicorn/requirements.txt +++ b/frameworks/Python/uvicorn/requirements.txt @@ -1,4 +1,4 @@ -asyncpg==0.20.1 +asyncpg==0.21.0 gunicorn==20.0.4 Jinja2==2.11.3 ujson==2.0.3 diff --git a/frameworks/Ruby/rails/Gemfile b/frameworks/Ruby/rails/Gemfile index 492f8e66b14..47974b05083 100644 --- a/frameworks/Ruby/rails/Gemfile +++ b/frameworks/Ruby/rails/Gemfile @@ -1,10 +1,12 @@ +ruby '~> 3.0.0' + source 'https://rubygems.org' do gem 'listen', '~> 3.3', group: :development gem 'mysql2', '0.5.3', group: :mysql gem 'oj', '~> 3.11.2' gem 'pg', '1.2.3', group: :postgresql - gem 'puma', '~> 5.2.1' + gem 'puma', '~> 5.3.0' gem 'rails', '~> 6.1.3' gem 'redis', '~> 4.0' gem 'tzinfo-data', '1.2021.1' -end \ No newline at end of file +end diff --git a/frameworks/Ruby/rails/Gemfile.lock b/frameworks/Ruby/rails/Gemfile.lock index 7aeaa3e10e6..64b1d32e90f 100644 --- a/frameworks/Ruby/rails/Gemfile.lock +++ b/frameworks/Ruby/rails/Gemfile.lock @@ 
-1,60 +1,60 @@ GEM remote: https://rubygems.org/ specs: - actioncable (6.1.3) - actionpack (= 6.1.3) - activesupport (= 6.1.3) + actioncable (6.1.3.2) + actionpack (= 6.1.3.2) + activesupport (= 6.1.3.2) nio4r (~> 2.0) websocket-driver (>= 0.6.1) - actionmailbox (6.1.3) - actionpack (= 6.1.3) - activejob (= 6.1.3) - activerecord (= 6.1.3) - activestorage (= 6.1.3) - activesupport (= 6.1.3) + actionmailbox (6.1.3.2) + actionpack (= 6.1.3.2) + activejob (= 6.1.3.2) + activerecord (= 6.1.3.2) + activestorage (= 6.1.3.2) + activesupport (= 6.1.3.2) mail (>= 2.7.1) - actionmailer (6.1.3) - actionpack (= 6.1.3) - actionview (= 6.1.3) - activejob (= 6.1.3) - activesupport (= 6.1.3) + actionmailer (6.1.3.2) + actionpack (= 6.1.3.2) + actionview (= 6.1.3.2) + activejob (= 6.1.3.2) + activesupport (= 6.1.3.2) mail (~> 2.5, >= 2.5.4) rails-dom-testing (~> 2.0) - actionpack (6.1.3) - actionview (= 6.1.3) - activesupport (= 6.1.3) + actionpack (6.1.3.2) + actionview (= 6.1.3.2) + activesupport (= 6.1.3.2) rack (~> 2.0, >= 2.0.9) rack-test (>= 0.6.3) rails-dom-testing (~> 2.0) rails-html-sanitizer (~> 1.0, >= 1.2.0) - actiontext (6.1.3) - actionpack (= 6.1.3) - activerecord (= 6.1.3) - activestorage (= 6.1.3) - activesupport (= 6.1.3) + actiontext (6.1.3.2) + actionpack (= 6.1.3.2) + activerecord (= 6.1.3.2) + activestorage (= 6.1.3.2) + activesupport (= 6.1.3.2) nokogiri (>= 1.8.5) - actionview (6.1.3) - activesupport (= 6.1.3) + actionview (6.1.3.2) + activesupport (= 6.1.3.2) builder (~> 3.1) erubi (~> 1.4) rails-dom-testing (~> 2.0) rails-html-sanitizer (~> 1.1, >= 1.2.0) - activejob (6.1.3) - activesupport (= 6.1.3) + activejob (6.1.3.2) + activesupport (= 6.1.3.2) globalid (>= 0.3.6) - activemodel (6.1.3) - activesupport (= 6.1.3) - activerecord (6.1.3) - activemodel (= 6.1.3) - activesupport (= 6.1.3) - activestorage (6.1.3) - actionpack (= 6.1.3) - activejob (= 6.1.3) - activerecord (= 6.1.3) - activesupport (= 6.1.3) - marcel (~> 0.3.1) - mimemagic (~> 0.3.2) - activesupport (6.1.3) + activemodel (6.1.3.2) + activesupport (= 6.1.3.2) + activerecord (6.1.3.2) + activemodel (= 6.1.3.2) + activesupport (= 6.1.3.2) + activestorage (6.1.3.2) + actionpack (= 6.1.3.2) + activejob (= 6.1.3.2) + activerecord (= 6.1.3.2) + activesupport (= 6.1.3.2) + marcel (~> 1.0.0) + mini_mime (~> 1.0.2) + activesupport (6.1.3.2) concurrent-ruby (~> 1.0, >= 1.0.2) i18n (>= 1.6, < 2) minitest (>= 5.1) @@ -67,62 +67,66 @@ GEM ffi (1.15.0) globalid (0.4.2) activesupport (>= 4.2.0) - i18n (1.8.9) + i18n (1.8.10) concurrent-ruby (~> 1.0) - listen (3.4.1) + listen (3.5.1) rb-fsevent (~> 0.10, >= 0.10.3) rb-inotify (~> 0.9, >= 0.9.10) - loofah (2.9.0) + loofah (2.9.1) crass (~> 1.0.2) nokogiri (>= 1.5.9) mail (2.7.1) mini_mime (>= 0.1.1) - marcel (0.3.3) - mimemagic (~> 0.3.2) + marcel (1.0.1) method_source (1.0.0) - mimemagic (0.3.5) - mini_mime (1.0.2) + mini_mime (1.0.3) + mini_portile2 (2.5.1) minitest (5.14.4) mysql2 (0.5.3) nio4r (2.5.7) - nokogiri (1.11.2-arm64-darwin) + nokogiri (1.11.3) + mini_portile2 (~> 2.5.0) racc (~> 1.4) - oj (3.11.3) + nokogiri (1.11.3-arm64-darwin) + racc (~> 1.4) + nokogiri (1.11.3-x86_64-linux) + racc (~> 1.4) + oj (3.11.5) pg (1.2.3) - puma (5.2.2) + puma (5.3.0) nio4r (~> 2.0) racc (1.5.2) rack (2.2.3) rack-test (1.1.0) rack (>= 1.0, < 3) - rails (6.1.3) - actioncable (= 6.1.3) - actionmailbox (= 6.1.3) - actionmailer (= 6.1.3) - actionpack (= 6.1.3) - actiontext (= 6.1.3) - actionview (= 6.1.3) - activejob (= 6.1.3) - activemodel (= 6.1.3) - activerecord (= 6.1.3) - activestorage (= 
6.1.3) - activesupport (= 6.1.3) + rails (6.1.3.2) + actioncable (= 6.1.3.2) + actionmailbox (= 6.1.3.2) + actionmailer (= 6.1.3.2) + actionpack (= 6.1.3.2) + actiontext (= 6.1.3.2) + actionview (= 6.1.3.2) + activejob (= 6.1.3.2) + activemodel (= 6.1.3.2) + activerecord (= 6.1.3.2) + activestorage (= 6.1.3.2) + activesupport (= 6.1.3.2) bundler (>= 1.15.0) - railties (= 6.1.3) + railties (= 6.1.3.2) sprockets-rails (>= 2.0.0) rails-dom-testing (2.0.3) activesupport (>= 4.2.0) nokogiri (>= 1.6) rails-html-sanitizer (1.3.0) loofah (~> 2.3) - railties (6.1.3) - actionpack (= 6.1.3) - activesupport (= 6.1.3) + railties (6.1.3.2) + actionpack (= 6.1.3.2) + activesupport (= 6.1.3.2) method_source rake (>= 0.8.7) thor (~> 1.0) rake (13.0.3) - rb-fsevent (0.10.4) + rb-fsevent (0.11.0) rb-inotify (0.10.1) ffi (~> 1.0) redis (4.2.5) @@ -145,16 +149,21 @@ GEM PLATFORMS arm64-darwin-20 + ruby + x86_64-linux DEPENDENCIES listen (~> 3.3)! mysql2 (= 0.5.3)! oj (~> 3.11.2)! pg (= 1.2.3)! - puma (~> 5.2.1)! + puma (~> 5.3.0)! rails (~> 6.1.3)! redis (~> 4.0)! tzinfo-data (= 1.2021.1)! +RUBY VERSION + ruby 3.0.0p0 + BUNDLED WITH 2.2.7 diff --git a/frameworks/Ruby/rails/app/controllers/hello_world_controller.rb b/frameworks/Ruby/rails/app/controllers/hello_world_controller.rb index ed5e52f3894..16de6c55e70 100644 --- a/frameworks/Ruby/rails/app/controllers/hello_world_controller.rb +++ b/frameworks/Ruby/rails/app/controllers/hello_world_controller.rb @@ -1,7 +1,10 @@ # frozen_string_literal: true class HelloWorldController < ApplicationController - QUERY_RANGE = (1..10_000).to_a + QUERY_RANGE = 1..10_000 # range of IDs in the Fortune DB + ALL_IDS = QUERY_RANGE.to_a # enumeration of all the IDs in fortune DB + MIN_QUERIES = 1 # min number of records that can be retrieved + MAX_QUERIES = 500 # max number of records that can be retrieved def plaintext render plain: 'Hello, World!' 
@@ -12,11 +15,11 @@ def json end def db - render json: World.find(Random.rand(1..10_000)) + render json: World.find(random_id) end def query - results = QUERY_RANGE.sample(query_count).map do |id| + results = ALL_IDS.sample(query_count).map do |id| World.find(id) end @@ -24,13 +27,11 @@ def query end def cached_query - results = QUERY_RANGE.sample(query_count).map do |id| - Rails.cache.fetch(id) do - World.find(id).as_json - end + items = Rails.cache.fetch_multi(*ALL_IDS.sample(query_count)) do |id| + World.find(id).as_json end - render json: results + render json: items.values end def fortune @@ -40,13 +41,11 @@ def fortune end def update - worlds = query_count.times.map { Random.rand(1..10_000) }.map do |id| - # get a random row from the database, which we know has 10000 - # rows with ids 1 - 10000 + worlds = query_count.times.map { random_id }.map do |id| world = World.find(id) - random = Random.rand(1..10_000) - random = Random.rand(1..10_000) until random != world.randomNumber - world.update_columns(randomNumber: random) + new_value = random_id + new_value = random_id until new_value != world.randomNumber + world.update_columns(randomNumber: new_value) world end @@ -57,9 +56,13 @@ def update def query_count queries = params[:queries].to_i - return 1 if queries < 1 - return 500 if queries > 500 + return MIN_QUERIES if queries < MIN_QUERIES + return MAX_QUERIES if queries > MAX_QUERIES queries end + + def random_id + Random.rand(QUERY_RANGE) + end end diff --git a/frameworks/Ruby/rails/app/views/hello_world/fortune.html.erb b/frameworks/Ruby/rails/app/views/hello_world/fortune.html.erb index 63b5cc1636e..b450e0827c8 100644 --- a/frameworks/Ruby/rails/app/views/hello_world/fortune.html.erb +++ b/frameworks/Ruby/rails/app/views/hello_world/fortune.html.erb @@ -1,12 +1,12 @@ - - - - -<% @fortunes.each do |fortune| %> - - - - -<% end %> -
<tr><th>id</th><th>message</th></tr>
<td><%= fortune.id %></td><td><%= fortune.message %></td>
\ No newline at end of file + + id + message + + <% @fortunes.each do |fortune| %> + + <%= fortune.id %> + <%= fortune.message %> + + <% end %> + diff --git a/frameworks/Ruby/rails/app/views/layouts/application.html.erb b/frameworks/Ruby/rails/app/views/layouts/application.html.erb index a0c8af7c0b3..2c125c136fd 100644 --- a/frameworks/Ruby/rails/app/views/layouts/application.html.erb +++ b/frameworks/Ruby/rails/app/views/layouts/application.html.erb @@ -1,9 +1,9 @@ - -Fortunes - - -<%= yield %> - + + Fortunes + + + <%= yield %> + diff --git a/frameworks/Ruby/roda-sequel/Gemfile b/frameworks/Ruby/roda-sequel/Gemfile index a291e16cef1..51a6be9c7be 100644 --- a/frameworks/Ruby/roda-sequel/Gemfile +++ b/frameworks/Ruby/roda-sequel/Gemfile @@ -1,22 +1,19 @@ source 'https://rubygems.org' -gem 'erubi', '~> 1.4' -gem 'json', '~> 2.0' -gem 'passenger', '~> 5.1', :platforms=>[:ruby, :mswin], :require=>false -gem 'puma', '~> 3.9', :require=>false -gem 'sequel', '~> 5.0' -gem 'roda', '~> 3.0' +gem 'erubi', '~> 1.10' +gem 'json', '~> 2.5' +gem 'passenger', '~> 6.0', :platforms=>[:ruby, :mswin], :require=>false +gem 'puma', '~> 5.2', :require=>false +gem 'sequel', '~> 5.44' +gem 'roda', '~> 3.43' gem 'tilt', '~> 2.0', :require=>'tilt/erb' -gem 'torquebox-web', '>= 4.0.0.beta3', '< 5', :platforms=>:jruby, :require=>false -gem 'unicorn', '~> 5.2', :platforms=>[:ruby, :mswin], :require=>false +gem 'unicorn', '~> 6.0', :platforms=>[:ruby, :mswin], :require=>false group :mysql do - gem 'jdbc-mysql', '~> 5.1', :platforms=>:jruby, :require=>'jdbc/mysql' - gem 'mysql2', '~> 0.4', :platforms=>[:ruby, :mswin] + gem 'mysql2', '~> 0.5', :platforms=>[:ruby, :mswin] end group :postgresql do - gem 'jdbc-postgres', '~> 9.4', :platforms=>:jruby, :require=>'jdbc/postgres' - gem 'pg', '~> 0.19', :platforms=>[:ruby, :mswin] - gem 'sequel_pg', '~> 1.6', :platforms=>:ruby, :require=>false + gem 'pg', '~> 1.2', :platforms=>[:ruby, :mswin] + gem 'sequel_pg', '~> 1.14', :platforms=>:ruby, :require=>false end diff --git a/frameworks/Ruby/roda-sequel/Gemfile.lock b/frameworks/Ruby/roda-sequel/Gemfile.lock new file mode 100644 index 00000000000..ff4c9ccd692 --- /dev/null +++ b/frameworks/Ruby/roda-sequel/Gemfile.lock @@ -0,0 +1,46 @@ +GEM + remote: https://rubygems.org/ + specs: + erubi (1.10.0) + json (2.5.1) + kgio (2.11.3) + mysql2 (0.5.3) + nio4r (2.5.7) + passenger (6.0.8) + rack + rake (>= 0.8.1) + pg (1.2.3) + puma (5.2.2) + nio4r (~> 2.0) + rack (2.2.3) + raindrops (0.19.1) + rake (13.0.3) + roda (3.43.1) + rack + sequel (5.44.0) + sequel_pg (1.14.0) + pg (>= 0.18.0, != 1.2.0) + sequel (>= 4.38.0) + tilt (2.0.10) + unicorn (6.0.0) + kgio (~> 2.6) + raindrops (~> 0.7) + +PLATFORMS + x86_64-linux + +DEPENDENCIES + erubi (~> 1.10) + json (~> 2.5) + mysql2 (~> 0.5) + passenger (~> 6.0) + pg (~> 1.2) + puma (~> 5.2) + roda (~> 3.43) + sequel (~> 5.44) + sequel_pg (~> 1.14) + tilt (~> 2.0) + unicorn (~> 6.0) + +BUNDLED WITH + 2.2.16 diff --git a/frameworks/Ruby/roda-sequel/benchmark_config.json b/frameworks/Ruby/roda-sequel/benchmark_config.json index d1a38c0643f..59aca086f9b 100644 --- a/frameworks/Ruby/roda-sequel/benchmark_config.json +++ b/frameworks/Ruby/roda-sequel/benchmark_config.json @@ -133,50 +133,6 @@ "display_name": "roda-sequel-postgres-unicorn-mri", "versus": "rack-sequel-postgres-unicorn-mri", "notes": "" - }, - "torquebox-jruby": { - "json_url": "/json", - "db_url": "/db", - "query_url": "/queries?queries=", - "fortune_url": "/fortunes", - "update_url": "/updates?queries=", - "plaintext_url": "/plaintext", - 
"port": 8080, - "approach": "Realistic", - "classification": "Micro", - "database": "MySQL", - "framework": "roda-sequel", - "language": "Ruby", - "orm": "Full", - "platform": "Rack", - "webserver": "TorqueBox", - "os": "Linux", - "database_os": "Linux", - "display_name": "roda-sequel-torquebox-jruby", - "versus": "rack-sequel-torquebox-jruby", - "notes": "" - }, - "postgres-torquebox-jruby": { - "json_url": "/json", - "db_url": "/db", - "query_url": "/queries?queries=", - "fortune_url": "/fortunes", - "update_url": "/updates?queries=", - "plaintext_url": "/plaintext", - "port": 8080, - "approach": "Realistic", - "classification": "Micro", - "database": "Postgres", - "framework": "roda-sequel", - "language": "Ruby", - "orm": "Full", - "platform": "Rack", - "webserver": "TorqueBox", - "os": "Linux", - "database_os": "Linux", - "display_name": "roda-sequel-postgres-torquebox-jruby", - "versus": "rack-sequel-postgres-torquebox-jruby", - "notes": "" } } ] diff --git a/frameworks/Ruby/roda-sequel/hello_world.rb b/frameworks/Ruby/roda-sequel/hello_world.rb index 2a5d283cb01..8bd23c36fc0 100644 --- a/frameworks/Ruby/roda-sequel/hello_world.rb +++ b/frameworks/Ruby/roda-sequel/hello_world.rb @@ -2,57 +2,61 @@ # Our Rack application to be executed by rackup class HelloWorld < Roda - plugin :default_headers, 'Content-Type'=>'text/html; charset=utf-8' - plugin :default_headers, 'Server'=>SERVER_STRING if SERVER_STRING plugin :hooks - plugin :json - plugin :render, :escape=>true, :layout_opts=>{ :cache_key=>'default_layout' } + plugin :render, escape: true, layout_opts: { cache_key: 'default_layout' } plugin :static_routing def bounded_queries queries = request['queries'].to_i return QUERIES_MIN if queries < QUERIES_MIN return QUERIES_MAX if queries > QUERIES_MAX + queries end # Return a random number between 1 and MAX_PK def rand1 - rand(MAX_PK).succ + rand(MAX_PK) + 1 end after do response['Date'] = Time.now.httpdate + response['Server'] = SERVER_STRING if SERVER_STRING end # Test type 1: JSON serialization static_get '/json' do |_| - { :message=>'Hello, World!' } + response['Content-Type'] = 'application/json' + + { message: 'Hello, World!' }.to_json end # Test type 2: Single database query static_get '/db' do |_| - World.with_pk(rand1).values + response['Content-Type'] = 'application/json' + + World.with_pk(rand1).values.to_json end # Test type 3: Multiple database queries static_get '/queries' do |_| - worlds = - DB.synchronize do - Array.new(bounded_queries) do - World.with_pk(rand1) - end + response['Content-Type'] = 'application/json' + worlds = DB.synchronize do + Array.new(bounded_queries) do + World.with_pk(rand1).values end + end - worlds.map!(&:values) + worlds.to_json end # Test type 4: Fortunes static_get '/fortunes' do |_| + response['Content-Type'] = 'text/html; charset=utf-8' @fortunes = Fortune.all @fortunes << Fortune.new( - :id=>0, - :message=>'Additional fortune added at request time.' + id: 0, + message: 'Additional fortune added at request time.' 
) @fortunes.sort_by!(&:message) @@ -61,16 +65,18 @@ def rand1 # Test type 5: Database updates static_get '/updates' do |_| - worlds = - DB.synchronize do - Array.new(bounded_queries) do - world = World.with_pk(rand1) - world.update(:randomnumber=>rand1) - world - end + response['Content-Type'] = 'application/json' + worlds = DB.synchronize do + Array.new(bounded_queries) do + world = World.with_pk(rand1) + new_value = rand1 + new_value = rand1 while new_value == world.randomnumber + world.update(randomnumber: new_value) + world.values end + end - worlds.map!(&:values) + worlds.to_json end # Test type 6: Plaintext diff --git a/frameworks/Ruby/roda-sequel/roda-sequel-passenger-mri.dockerfile b/frameworks/Ruby/roda-sequel/roda-sequel-passenger-mri.dockerfile index 4b778340beb..027113dc460 100644 --- a/frameworks/Ruby/roda-sequel/roda-sequel-passenger-mri.dockerfile +++ b/frameworks/Ruby/roda-sequel/roda-sequel-passenger-mri.dockerfile @@ -1,4 +1,4 @@ -FROM ruby:2.4 +FROM ruby:2.7 ADD ./ /roda-sequel WORKDIR /roda-sequel diff --git a/frameworks/Ruby/roda-sequel/roda-sequel-postgres-passenger-mri.dockerfile b/frameworks/Ruby/roda-sequel/roda-sequel-postgres-passenger-mri.dockerfile index 2e661ee8fcf..ea23e096d3d 100644 --- a/frameworks/Ruby/roda-sequel/roda-sequel-postgres-passenger-mri.dockerfile +++ b/frameworks/Ruby/roda-sequel/roda-sequel-postgres-passenger-mri.dockerfile @@ -1,4 +1,4 @@ -FROM ruby:2.4 +FROM ruby:2.7 ADD ./ /roda-sequel WORKDIR /roda-sequel diff --git a/frameworks/Ruby/roda-sequel/roda-sequel-postgres-torquebox-jruby.dockerfile b/frameworks/Ruby/roda-sequel/roda-sequel-postgres-torquebox-jruby.dockerfile deleted file mode 100644 index f86eb233790..00000000000 --- a/frameworks/Ruby/roda-sequel/roda-sequel-postgres-torquebox-jruby.dockerfile +++ /dev/null @@ -1,14 +0,0 @@ -FROM jruby:9.1 - -ADD ./ /roda-sequel -WORKDIR /roda-sequel - -ENV THREAD_FACTOR=2 - -RUN bundle install --jobs=4 --gemfile=/roda-sequel/Gemfile --path=/roda-sequel/roda-sequel/bundle - -ENV DBTYPE=postgresql - -EXPOSE 8080 - -CMD export MAX_CONCURRENCY=$(( 2 * $(nproc) )) && bundle exec torquebox run --io-threads $(( MAX_CONCURRENCY / 2 )) --worker-threads $MAX_CONCURRENCY -b 0.0.0.0 -p 8080 -e production diff --git a/frameworks/Ruby/roda-sequel/roda-sequel-postgres-unicorn-mri.dockerfile b/frameworks/Ruby/roda-sequel/roda-sequel-postgres-unicorn-mri.dockerfile index 5531da2d91a..701bdf9f774 100644 --- a/frameworks/Ruby/roda-sequel/roda-sequel-postgres-unicorn-mri.dockerfile +++ b/frameworks/Ruby/roda-sequel/roda-sequel-postgres-unicorn-mri.dockerfile @@ -1,4 +1,4 @@ -FROM ruby:2.4 +FROM ruby:2.7 ADD ./ /roda-sequel WORKDIR /roda-sequel diff --git a/frameworks/Ruby/roda-sequel/roda-sequel-postgres.dockerfile b/frameworks/Ruby/roda-sequel/roda-sequel-postgres.dockerfile index 71b6d01d875..225fa45d8af 100644 --- a/frameworks/Ruby/roda-sequel/roda-sequel-postgres.dockerfile +++ b/frameworks/Ruby/roda-sequel/roda-sequel-postgres.dockerfile @@ -1,4 +1,4 @@ -FROM ruby:2.4 +FROM ruby:2.7 ADD ./ /roda-sequel WORKDIR /roda-sequel diff --git a/frameworks/Ruby/roda-sequel/roda-sequel-torquebox-jruby.dockerfile b/frameworks/Ruby/roda-sequel/roda-sequel-torquebox-jruby.dockerfile deleted file mode 100644 index 602556ec742..00000000000 --- a/frameworks/Ruby/roda-sequel/roda-sequel-torquebox-jruby.dockerfile +++ /dev/null @@ -1,14 +0,0 @@ -FROM jruby:9.1 - -ADD ./ /roda-sequel -WORKDIR /roda-sequel - -ENV THREAD_FACTOR=2 - -RUN bundle install --jobs=4 --gemfile=/roda-sequel/Gemfile --path=/roda-sequel/roda-sequel/bundle 
- -ENV DBTYPE=mysql - -EXPOSE 8080 - -CMD export MAX_CONCURRENCY=$(( 2 * $(nproc) )) && bundle exec torquebox run --io-threads $(( MAX_CONCURRENCY / 2 )) --worker-threads $MAX_CONCURRENCY -b 0.0.0.0 -p 8080 -e production diff --git a/frameworks/Ruby/roda-sequel/roda-sequel-unicorn-mri.dockerfile b/frameworks/Ruby/roda-sequel/roda-sequel-unicorn-mri.dockerfile index 35cb55f3a6e..bb1e23b9ca6 100644 --- a/frameworks/Ruby/roda-sequel/roda-sequel-unicorn-mri.dockerfile +++ b/frameworks/Ruby/roda-sequel/roda-sequel-unicorn-mri.dockerfile @@ -1,4 +1,4 @@ -FROM ruby:2.4 +FROM ruby:2.7 ADD ./ /roda-sequel WORKDIR /roda-sequel diff --git a/frameworks/Ruby/roda-sequel/roda-sequel.dockerfile b/frameworks/Ruby/roda-sequel/roda-sequel.dockerfile index 8220712dd1d..edd21ba8e12 100644 --- a/frameworks/Ruby/roda-sequel/roda-sequel.dockerfile +++ b/frameworks/Ruby/roda-sequel/roda-sequel.dockerfile @@ -1,4 +1,4 @@ -FROM ruby:2.4 +FROM ruby:2.7 ADD ./ /roda-sequel WORKDIR /roda-sequel diff --git a/frameworks/Rust/faf/Cargo.lock b/frameworks/Rust/faf/Cargo.lock new file mode 100644 index 00000000000..6b9442cf4f5 --- /dev/null +++ b/frameworks/Rust/faf/Cargo.lock @@ -0,0 +1,81 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +[[package]] +name = "cc" +version = "1.0.67" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3c69b077ad434294d3ce9f1f6143a2a4b89a8a2d54ef813d85003a4fd1137fd" + +[[package]] +name = "faf" +version = "0.1.1" +source = "git+https://github.com/errantmind/faf#013c81162f09ce376c0b761ed5531d1d47e33c94" +dependencies = [ + "faf-pico-sys", + "num_cpus", + "sys-call", +] + +[[package]] +name = "faf-ex" +version = "0.1.0" +dependencies = [ + "faf", + "mimalloc", +] + +[[package]] +name = "faf-pico-sys" +version = "0.1.1" +source = "git+https://github.com/errantmind/faf-pico-sys#7f273a13302a04ba93f4edbfda5ad3073a7832f9" +dependencies = [ + "cc", +] + +[[package]] +name = "hermit-abi" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "322f4de77956e22ed0e5032c359a0f1273f1f7f0d79bfa3b8ffbc730d7fbcc5c" +dependencies = [ + "libc", +] + +[[package]] +name = "libc" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56d855069fafbb9b344c0f962150cd2c1187975cb1c22c1522c240d8c4986714" + +[[package]] +name = "libmimalloc-sys" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2396cf99d2f58611cd69f0efeee4af3d2e2c7b61bed433515029163aa567e65c" +dependencies = [ + "cc", +] + +[[package]] +name = "mimalloc" +version = "0.1.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e7c6b11afd1e5e689ac96b6d18b1fc763398fe3d7eed99e8773426bc2033dfb" +dependencies = [ + "libmimalloc-sys", +] + +[[package]] +name = "num_cpus" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "sys-call" +version = "0.1.1" +source = "git+https://github.com/errantmind/sys-call-rs#46fc51c2dbc3c54e60424d695eae615910b2e99c" diff --git a/frameworks/Rust/faf/Cargo.toml b/frameworks/Rust/faf/Cargo.toml index 432cb421243..aed8510c249 100644 --- a/frameworks/Rust/faf/Cargo.toml +++ b/frameworks/Rust/faf/Cargo.toml @@ -15,4 +15,3 @@ overflow-checks = false [dependencies] mimalloc = { version = "*", default-features = false } faf 
= { git = "https://github.com/errantmind/faf" } -#faf = { path = "/home/errant/dev/projects/faf-epoll" } diff --git a/frameworks/Rust/faf/src/main.rs b/frameworks/Rust/faf/src/main.rs index 38df62a5585..28ce6454082 100644 --- a/frameworks/Rust/faf/src/main.rs +++ b/frameworks/Rust/faf/src/main.rs @@ -1,3 +1,5 @@ +#![feature(core_intrinsics)] + #[global_allocator] static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; @@ -6,11 +8,12 @@ use faf::const_config::*; use faf::const_http::*; use faf::extern_http_date; use faf::util::{const_len, memcmp}; +use std::intrinsics::likely; const ROUTE_PLAINTEXT: &[u8] = b"/p"; const ROUTE_PLAINTEXT_LEN: usize = const_len(ROUTE_PLAINTEXT); -const ROUTE_JSON: &[u8] = b"/j"; -const ROUTE_JSON_LEN: usize = const_len(ROUTE_JSON); +// const ROUTE_JSON: &[u8] = b"/j"; +// const ROUTE_JSON_LEN: usize = const_len(ROUTE_JSON); const TEXT_PLAIN_CONTENT_TYPE: &[u8] = b"Content-Type: text/plain"; const CONTENT_LENGTH: &[u8] = b"Content-Length: "; @@ -40,55 +43,53 @@ fn cb( path_len: usize, _headers: &[faf::phr_header; MAX_HEADERS_TO_PARSE], _num_headers: usize, - response_buffer: &mut [u8; REQ_RES_BUFF_SIZE], + response_buffer: *mut u8, ) -> usize { - if method_len < GET_LEN || path_len < ROUTE_PLAINTEXT_LEN { - return 0; - } unsafe { - if memcmp(GET.as_ptr() as *const i8, method, GET_LEN) == 0 { - // For performance purposes, this will successfully match '/p' to '/plaintext' and '/pickle'. Use with caution - if memcmp(ROUTE_PLAINTEXT.as_ptr() as *const i8, path, ROUTE_PLAINTEXT_LEN) == 0 { - let mut date_buff = crate::extern_http_date::get_buff_with_date(); - extern_http_date::get_http_date(&mut date_buff); - std::ptr::copy_nonoverlapping(PLAINTEXT_BASE.as_ptr(), response_buffer.as_mut_ptr(), PLAINTEXT_BASE_LEN); - std::ptr::copy_nonoverlapping( - date_buff.as_ptr(), - response_buffer.as_mut_ptr().add(PLAINTEXT_BASE_LEN), - DATE_LEN, - ); - std::ptr::copy_nonoverlapping( - CRLFCRLF.as_ptr(), - response_buffer.as_mut_ptr().add(PLAINTEXT_BASE_LEN + DATE_LEN), - CRLFCRLF_LEN, - ); - std::ptr::copy_nonoverlapping( - PLAINTEXT_BODY.as_ptr(), - response_buffer.as_mut_ptr().add(PLAINTEXT_BASE_LEN + DATE_LEN + CRLFCRLF_LEN), - PLAINTEXT_BODY_LEN, - ); + if likely(method_len >= GET_LEN && path_len >= ROUTE_PLAINTEXT_LEN) { + if likely(memcmp(GET.as_ptr() as *const i8, method, GET_LEN) == 0) { + // For performance purposes, this will successfully match '/p' to '/plaintext' and '/pickle'. 
Use with caution + if likely(memcmp(ROUTE_PLAINTEXT.as_ptr() as *const i8, path, ROUTE_PLAINTEXT_LEN) == 0) { + let mut date_buff = crate::extern_http_date::get_buff_with_date(); + extern_http_date::get_http_date(&mut date_buff); + std::ptr::copy_nonoverlapping(PLAINTEXT_BASE.as_ptr(), response_buffer, PLAINTEXT_BASE_LEN); + std::ptr::copy_nonoverlapping( + date_buff.as_ptr(), + response_buffer.add(PLAINTEXT_BASE_LEN), + DATE_LEN, + ); + std::ptr::copy_nonoverlapping( + CRLFCRLF.as_ptr(), + response_buffer.add(PLAINTEXT_BASE_LEN + DATE_LEN), + CRLFCRLF_LEN, + ); + std::ptr::copy_nonoverlapping( + PLAINTEXT_BODY.as_ptr(), + response_buffer.add(PLAINTEXT_BASE_LEN + DATE_LEN + CRLFCRLF_LEN), + PLAINTEXT_BODY_LEN, + ); - return PLAINTEXT_BASE_LEN + DATE_LEN + CRLFCRLF_LEN + PLAINTEXT_BODY_LEN; - } else if memcmp(ROUTE_JSON.as_ptr() as *const i8, path, ROUTE_JSON_LEN) == 0 { + PLAINTEXT_BASE_LEN + DATE_LEN + CRLFCRLF_LEN + PLAINTEXT_BODY_LEN + } else { + std::ptr::copy_nonoverlapping( + HTTP_404_NOTFOUND.as_ptr(), + response_buffer, + HTTP_404_NOTFOUND_LEN, + ); + HTTP_404_NOTFOUND_LEN + } } else { std::ptr::copy_nonoverlapping( - HTTP_404_NOTFOUND.as_ptr(), - response_buffer.as_mut_ptr(), - HTTP_404_NOTFOUND_LEN, + HTTP_405_NOTALLOWED.as_ptr(), + response_buffer, + HTTP_405_NOTALLOWED_LEN, ); - return HTTP_404_NOTFOUND_LEN; + HTTP_405_NOTALLOWED_LEN } } else { - std::ptr::copy_nonoverlapping( - HTTP_405_NOTALLOWED.as_ptr(), - response_buffer.as_mut_ptr(), - HTTP_405_NOTALLOWED_LEN, - ); - return HTTP_405_NOTALLOWED_LEN; + 0 } - }; - - 0 + } } #[inline] diff --git a/frameworks/Rust/may-minihttp/Cargo.toml b/frameworks/Rust/may-minihttp/Cargo.toml index af243702953..93b0fdff431 100644 --- a/frameworks/Rust/may-minihttp/Cargo.toml +++ b/frameworks/Rust/may-minihttp/Cargo.toml @@ -5,19 +5,15 @@ authors = ["Xudong Huang "] edition = "2018" [dependencies] -markup = "0.4" +may = "0.3" mimalloc = "0.1" num_cpus = "1.0" oorandom = "11" smallvec = "1.1" -v_htmlescape = "0.10" -serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0" -may = "0.3" +yarte = { version = "0.15", features = ["bytes-buf", "json"] } may_postgres = { git = "https://github.com/Xudong-Huang/may_postgres.git" } may_minihttp = { git = "https://github.com/Xudong-Huang/may_minihttp.git" } [profile.release] lto = true codegen-units = 1 - diff --git a/frameworks/Rust/may-minihttp/may-minihttp.dockerfile b/frameworks/Rust/may-minihttp/may-minihttp.dockerfile index f1139ea5f43..6070eb6659f 100644 --- a/frameworks/Rust/may-minihttp/may-minihttp.dockerfile +++ b/frameworks/Rust/may-minihttp/may-minihttp.dockerfile @@ -1,6 +1,6 @@ -FROM rust:1.44 +FROM rust:1.51 -RUN apt-get update -yqq && apt-get install -yqq cmake +RUN apt-get update -yqq && apt-get install -yqq cmake g++ ADD ./ /may WORKDIR /may diff --git a/frameworks/Rust/may-minihttp/src/main.rs b/frameworks/Rust/may-minihttp/src/main.rs index b730e5d9175..2ca8424ec93 100644 --- a/frameworks/Rust/may-minihttp/src/main.rs +++ b/frameworks/Rust/may-minihttp/src/main.rs @@ -1,16 +1,17 @@ #[global_allocator] static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; +use std::borrow::Cow; use std::fmt::Write; use std::io; use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc; -use may_minihttp::{BodyWriter, HttpService, HttpServiceFactory, Request, Response}; -use may_postgres::{self, Client, RowStream, Statement}; +use may_minihttp::{HttpService, HttpServiceFactory, Request, Response}; +use may_postgres::{self, types::ToSql, Client, Statement}; use 
oorandom::Rand32; -use serde::Serialize; use smallvec::SmallVec; +use yarte::{ywrite_html, Serialize}; mod utils { use may_postgres::types::ToSql; @@ -46,29 +47,7 @@ struct WorldRow { #[derive(Serialize)] pub struct Fortune { id: i32, - message: String, -} - -markup::define! { - FortunesTemplate(fortunes: Vec) { - {markup::doctype()} - html { - head { - title { "Fortunes" } - } - body { - table { - tr { th { "id" } th { "message" } } - @for item in {fortunes} { - tr { - td { {item.id} } - td { {markup::raw(v_htmlescape::escape(&item.message))} } - } - } - } - } - } - } + message: Cow<'static, str>, } struct PgConnectionPool { @@ -101,21 +80,40 @@ struct PgConnection { client: Client, world: Statement, fortune: Statement, + updates: Vec, } impl PgConnection { fn new(db_url: &str) -> Self { let client = may_postgres::connect(db_url).unwrap(); - let world = client - .prepare("SELECT id, randomnumber FROM world WHERE id=$1") - .unwrap(); - - let fortune = client.prepare("SELECT id, message FROM fortune").unwrap(); + let world = client.prepare("SELECT * FROM world WHERE id=$1").unwrap(); + + let fortune = client.prepare("SELECT * FROM fortune").unwrap(); + + let mut updates = Vec::new(); + for num in 1..=500u16 { + let mut pl: u16 = 1; + let mut q = String::new(); + q.push_str("UPDATE world SET randomnumber = CASE id "); + for _ in 1..=num { + let _ = write!(&mut q, "when ${} then ${} ", pl, pl + 1); + pl += 2; + } + q.push_str("ELSE randomnumber END WHERE id IN ("); + for _ in 1..=num { + let _ = write!(&mut q, "${},", pl); + pl += 1; + } + q.pop(); + q.push(')'); + updates.push(client.prepare(&q).unwrap()); + } PgConnection { client, world, fortune, + updates, } } @@ -136,17 +134,17 @@ impl PgConnection { &self, num: usize, rand: &mut Rand32, - ) -> Result, may_postgres::Error> { - let mut queries = SmallVec::<[RowStream; 32]>::new(); + ) -> Result, may_postgres::Error> { + let mut queries = SmallVec::<[_; 32]>::new(); for _ in 0..num { - let random_id = rand.rand_range(1..10001) as i32; + let random_id = (rand.rand_u32() % 10_000 + 1) as i32; queries.push( self.client .query_raw(&self.world, utils::slice_iter(&[&random_id]))?, ); } - let mut worlds = Vec::with_capacity(num); + let mut worlds = SmallVec::<[_; 32]>::new(); for mut q in queries { match q.next().transpose()? { Some(row) => worlds.push(WorldRow { @@ -159,47 +157,50 @@ impl PgConnection { Ok(worlds) } - fn updates(&self, num: usize, rand: &mut Rand32) -> Result, may_postgres::Error> { - let mut queries = SmallVec::<[RowStream; 32]>::new(); + fn updates( + &self, + num: usize, + rand: &mut Rand32, + ) -> Result, may_postgres::Error> { + let mut queries = SmallVec::<[_; 32]>::new(); for _ in 0..num { - let random_id = rand.rand_range(1..10001) as i32; + let random_id = (rand.rand_u32() % 10_000 + 1) as i32; queries.push( self.client .query_raw(&self.world, utils::slice_iter(&[&random_id]))?, ); } - let mut worlds = Vec::with_capacity(num); + let mut worlds = SmallVec::<[_; 32]>::new(); for mut q in queries { + let new_random_num = (rand.rand_u32() % 10_000 + 1) as i32; match q.next().transpose()? 
{ Some(row) => worlds.push(WorldRow { id: row.get(0), - randomnumber: row.get(1), + randomnumber: new_random_num, }), None => unreachable!(), } } - let mut update = String::with_capacity(120 + 12 * num); - update.push_str("UPDATE world SET randomnumber = temp.randomnumber FROM (VALUES "); - - for w in &mut worlds { - w.randomnumber = rand.rand_range(1..10001) as i32; - let _ = write!(&mut update, "({}, {}),", w.id, w.randomnumber); + let mut params: Vec<&(dyn ToSql + Sync)> = Vec::with_capacity(num * 3); + for w in &worlds { + params.push(&w.id); + params.push(&w.randomnumber); + } + for w in &worlds { + params.push(&w.id); } - update.pop(); - update.push_str(" ORDER BY 1) AS temp(id, randomnumber) WHERE temp.id = world.id"); - self.client.simple_query(&update)?; + self.client.query(&self.updates[num - 1], ¶ms)?; Ok(worlds) } fn tell_fortune(&self) -> Result, may_postgres::Error> { - let mut items = Vec::with_capacity(80); - items.push(Fortune { + let mut items = vec![Fortune { id: 0, - message: "Additional fortune added at request time.".to_string(), - }); + message: Cow::Borrowed("Additional fortune added at request time."), + }]; let rows = self .client @@ -209,7 +210,7 @@ impl PgConnection { let r = row?; items.push(Fortune { id: r.get(0), - message: r.get(1), + message: Cow::Owned(r.get(1)), }); } @@ -229,38 +230,38 @@ impl HttpService for Techempower { match req.path() { "/json" => { rsp.header("Content-Type: application/json"); - serde_json::to_writer( - BodyWriter(rsp.body_mut()), - &HeloMessage { - message: "Hello, World!", - }, - )?; + let msg = HeloMessage { + message: "Hello, World!", + }; + msg.to_bytes_mut(rsp.body_mut()); } "/plaintext" => { rsp.header("Content-Type: text/plain").body("Hello, World!"); } "/db" => { - let random_id = self.rng.rand_range(1..10001) as i32; - let world = self.db.get_world(random_id).unwrap(); rsp.header("Content-Type: application/json"); - serde_json::to_writer(BodyWriter(rsp.body_mut()), &world)?; + let random_id = (self.rng.rand_u32() % 10_000 + 1) as i32; + let world = self.db.get_world(random_id).unwrap(); + world.to_bytes_mut(rsp.body_mut()) } "/fortunes" => { - let fortunes = self.db.tell_fortune().unwrap(); rsp.header("Content-Type: text/html; charset=utf-8"); - write!(rsp.body_mut(), "{}", FortunesTemplate { fortunes }).unwrap(); + let fortunes = self.db.tell_fortune().unwrap(); + let mut body = Vec::with_capacity(2048); + ywrite_html!(body, "{{> fortune }}"); + rsp.body_vec(body); } p if p.starts_with("/queries") => { + rsp.header("Content-Type: application/json"); let q = utils::get_query_param(p) as usize; let worlds = self.db.get_worlds(q, &mut self.rng).unwrap(); - rsp.header("Content-Type: application/json"); - serde_json::to_writer(BodyWriter(rsp.body_mut()), &worlds)?; + worlds.to_bytes_mut(rsp.body_mut()); } p if p.starts_with("/updates") => { + rsp.header("Content-Type: application/json"); let q = utils::get_query_param(p) as usize; let worlds = self.db.updates(q, &mut self.rng).unwrap(); - rsp.header("Content-Type: application/json"); - serde_json::to_writer(BodyWriter(rsp.body_mut()), &worlds)?; + worlds.to_bytes_mut(rsp.body_mut()); } _ => { rsp.status_code("404", "Not Found"); @@ -289,6 +290,7 @@ fn main() { may::config() .set_pool_capacity(10000) .set_stack_size(0x1000); + println!("Starting http server: 127.0.0.1:8080"); let server = HttpServer { db_pool: PgConnectionPool::new( "postgres://benchmarkdbuser:benchmarkdbpass@tfb-database/hello_world", diff --git a/frameworks/Rust/salvo/templates/fortune.hbs 
b/frameworks/Rust/may-minihttp/templates/fortune.hbs similarity index 87% rename from frameworks/Rust/salvo/templates/fortune.hbs rename to frameworks/Rust/may-minihttp/templates/fortune.hbs index c6d72bca4a4..b9e25a52a8e 100644 --- a/frameworks/Rust/salvo/templates/fortune.hbs +++ b/frameworks/Rust/may-minihttp/templates/fortune.hbs @@ -1,5 +1,5 @@ Fortunes - {{~# each items ~}} + {{~# each fortunes ~}} {{~/each ~}}
<tr><th>id</th><th>message</th></tr>
<tr><td>{{id}}</td><td>{{message}}</td></tr>
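The fortune.hbs template above is rendered through yarte's ywrite_html!, while the batched update path in the may-minihttp change above (the ntex driver uses the same scheme) prepares one statement per batch size of the form UPDATE world SET randomnumber = CASE id when $1 then $2 ... ELSE randomnumber END WHERE id IN (...), binding the (id, randomnumber) pairs first and then the ids again. Below is a standalone sketch of that statement-building loop, mirroring the loop in the diff; build_update_sql is a hypothetical helper name, not part of the patch.

```rust
// Sketch only: rebuilds the SQL text that the PgConnection constructor above
// prepares once per batch size (1..=500). Placeholders $1..$2n carry the
// (id, randomnumber) pairs for the CASE arms; $2n+1..$3n repeat the ids for
// the IN (...) filter, matching the order in which the call sites push params.
use std::fmt::Write;

fn build_update_sql(num: usize) -> String {
    let mut pl = 1; // next placeholder index
    let mut q = String::from("UPDATE world SET randomnumber = CASE id ");
    for _ in 0..num {
        let _ = write!(q, "when ${} then ${} ", pl, pl + 1);
        pl += 2;
    }
    q.push_str("ELSE randomnumber END WHERE id IN (");
    for _ in 0..num {
        let _ = write!(q, "${},", pl);
        pl += 1;
    }
    q.pop(); // drop the trailing comma
    q.push(')');
    q
}

fn main() {
    // Prints (as one line):
    // UPDATE world SET randomnumber = CASE id when $1 then $2 when $3 then $4
    // ELSE randomnumber END WHERE id IN ($5,$6)
    println!("{}", build_update_sql(2));
}
```

For a batch of n rows the statement therefore takes 3n parameters, which is why both call sites size their parameter vectors with num * 3.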
diff --git a/frameworks/Rust/ntex/Cargo.toml b/frameworks/Rust/ntex/Cargo.toml index 7edf66ccc77..8b92b1ba84f 100755 --- a/frameworks/Rust/ntex/Cargo.toml +++ b/frameworks/Rust/ntex/Cargo.toml @@ -16,20 +16,20 @@ name = "ntex-raw" path = "src/main_raw.rs" [dependencies] -ntex = "0.3.12" +ntex = "0.3.17" mimalloc = { version = "0.1.25", default-features = false } snmalloc-rs = { version = "0.2.26", features = ["1mib", "native-cpu"] } yarte = { version = "0.15", features = ["bytes-buf", "json"] } env_logger = "0.8" -random-fast-rng = "0.1.1" +nanorand = { version = "0.5", default-features = false, features = ["std", "wyrand"] } bytes = "1.0" -atoi = "0.4.0" +atoi = "0.4" num_cpus = "1.13" -futures = "0.3.13" +futures = "0.3" http = "0.2" smallvec = "1.6.1" -simd-json = "0.3.24" -simd-json-derive = "0.1.15" +simd-json = "0.4.6" +simd-json-derive = "0.2.2" serde = { version = "1.0", features = ["derive"] } log = { version = "0.4", features = ["release_max_level_off"] } tokio = "1" diff --git a/frameworks/Rust/ntex/ntex-db.dockerfile b/frameworks/Rust/ntex/ntex-db.dockerfile index 1acff5a31f2..e9d361c6b6c 100644 --- a/frameworks/Rust/ntex/ntex-db.dockerfile +++ b/frameworks/Rust/ntex/ntex-db.dockerfile @@ -1,7 +1,7 @@ -FROM rust:1.50 +FROM rust:1.52.1 # Disable simd at jsonescape -ENV CARGO_CFG_JSONESCAPE_DISABLE_AUTO_SIMD= +# ENV CARGO_CFG_JSONESCAPE_DISABLE_AUTO_SIMD= RUN apt-get update -yqq && apt-get install -yqq cmake g++ diff --git a/frameworks/Rust/ntex/ntex-raw.dockerfile b/frameworks/Rust/ntex/ntex-raw.dockerfile index 69970690422..3a4b2aace37 100644 --- a/frameworks/Rust/ntex/ntex-raw.dockerfile +++ b/frameworks/Rust/ntex/ntex-raw.dockerfile @@ -1,7 +1,7 @@ -FROM rust:1.50 +FROM rust:1.52.1 # Disable simd at jsonescape -ENV CARGO_CFG_JSONESCAPE_DISABLE_AUTO_SIMD= +# ENV CARGO_CFG_JSONESCAPE_DISABLE_AUTO_SIMD= RUN apt-get update -yqq && apt-get install -yqq cmake g++ diff --git a/frameworks/Rust/ntex/ntex.dockerfile b/frameworks/Rust/ntex/ntex.dockerfile index d237b5adf3a..4d9a95643aa 100644 --- a/frameworks/Rust/ntex/ntex.dockerfile +++ b/frameworks/Rust/ntex/ntex.dockerfile @@ -1,7 +1,7 @@ -FROM rust:1.50 +FROM rust:1.52.1 # Disable simd at jsonescape -ENV CARGO_CFG_JSONESCAPE_DISABLE_AUTO_SIMD= +# ENV CARGO_CFG_JSONESCAPE_DISABLE_AUTO_SIMD= RUN apt-get update -yqq && apt-get install -yqq cmake g++ diff --git a/frameworks/Rust/ntex/src/db.rs b/frameworks/Rust/ntex/src/db.rs index f9d4b28b931..56a3ca67f59 100644 --- a/frameworks/Rust/ntex/src/db.rs +++ b/frameworks/Rust/ntex/src/db.rs @@ -1,8 +1,8 @@ use std::{borrow::Cow, cell::RefCell, fmt::Write as FmtWrite}; use bytes::{Bytes, BytesMut}; -use futures::{stream::futures_unordered::FuturesUnordered, Future, FutureExt, StreamExt}; -use random_fast_rng::{FastRng, Random}; +use futures::{Future, FutureExt}; +use nanorand::{WyRand, RNG}; use smallvec::SmallVec; use tokio_postgres::types::ToSql; use tokio_postgres::{connect, Client, NoTls, Statement}; @@ -25,7 +25,7 @@ pub struct PgConnection { cl: Client, fortune: Statement, world: Statement, - rng: RefCell, + rng: RefCell, updates: Vec, } @@ -62,14 +62,14 @@ impl PgConnection { fortune, world, updates, - rng: RefCell::new(FastRng::new()), + rng: RefCell::new(WyRand::new()), } } } impl PgConnection { pub fn get_world(&self) -> impl Future { - let random_id = (self.rng.borrow_mut().get_u32() % 10_000 + 1) as i32; + let random_id = (self.rng.borrow_mut().generate::() % 10_000 + 1) as i32; self.cl.query(&self.world, &[&random_id]).map(|rows| { let rows = rows.unwrap(); World { @@ -81,29 
+81,33 @@ impl PgConnection { } pub fn get_worlds(&self, num: u16) -> impl Future> { - let worlds = FuturesUnordered::new(); + let mut futs = Vec::with_capacity(num as usize); let mut rng = self.rng.borrow_mut(); for _ in 0..num { - let w_id = (rng.get_u32() % 10_000 + 1) as i32; - worlds.push(self.cl.query(&self.world, &[&w_id]).map(|res| { - let rows = res.unwrap(); - World { + let w_id = (rng.generate::() % 10_000 + 1) as i32; + futs.push(self.cl.query(&self.world, &[&w_id])); + } + + async move { + let mut worlds: Vec = Vec::with_capacity(num as usize); + for q in futs { + let rows = q.await.unwrap(); + worlds.push(World { id: rows[0].get(0), randomnumber: rows[0].get(1), - } - })); + }) + } + worlds } - - worlds.collect() } pub fn update(&self, num: u16) -> impl Future> { - let worlds = FuturesUnordered::new(); + let mut futs = Vec::with_capacity(num as usize); let mut rng = self.rng.borrow_mut(); for _ in 0..num { - let id = (rng.get_u32() % 10_000 + 1) as i32; - let w_id = (rng.get_u32() % 10_000 + 1) as i32; - worlds.push(self.cl.query(&self.world, &[&w_id]).map(move |res| { + let id = (rng.generate::() % 10_000 + 1) as i32; + let w_id = (rng.generate::() % 10_000 + 1) as i32; + futs.push(self.cl.query(&self.world, &[&w_id]).map(move |res| { let rows = res.unwrap(); World { id: rows[0].get(0), @@ -115,7 +119,10 @@ impl PgConnection { let cl = self.cl.clone(); let st = self.updates[(num as usize) - 1].clone(); async move { - let worlds: Vec = worlds.collect().await; + let mut worlds: Vec = Vec::with_capacity(num as usize); + for q in futs { + worlds.push(q.await); + } let mut params: Vec<&dyn ToSql> = Vec::with_capacity(num as usize * 3); for w in &worlds { diff --git a/frameworks/Rust/ntex/src/main.rs b/frameworks/Rust/ntex/src/main.rs index d8340fea199..e6bdb80023d 100644 --- a/frameworks/Rust/ntex/src/main.rs +++ b/frameworks/Rust/ntex/src/main.rs @@ -1,5 +1,5 @@ #[global_allocator] -static GLOBAL: snmalloc_rs::SnMalloc = snmalloc_rs::SnMalloc; +static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; use bytes::Bytes; use ntex::http::header::{HeaderValue, CONTENT_TYPE, SERVER}; diff --git a/frameworks/Rust/ntex/src/main_db.rs b/frameworks/Rust/ntex/src/main_db.rs index 3a8250d264e..5117eaf1e21 100644 --- a/frameworks/Rust/ntex/src/main_db.rs +++ b/frameworks/Rust/ntex/src/main_db.rs @@ -103,7 +103,6 @@ async fn main() -> std::io::Result<()> { .h1(AppFactory) .tcp() })? - .workers((num_cpus::get() as f32 * 1.2) as usize) .start() .await } diff --git a/frameworks/Rust/salvo/Cargo.lock b/frameworks/Rust/salvo/Cargo.lock new file mode 100644 index 00000000000..bb09dcb8d5c --- /dev/null +++ b/frameworks/Rust/salvo/Cargo.lock @@ -0,0 +1,1893 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+[[package]] +name = "aho-corasick" +version = "0.7.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7404febffaa47dac81aa44dba71523c9d069b1bdc50a77db41195149e17f68e5" +dependencies = [ + "memchr", +] + +[[package]] +name = "anyhow" +version = "1.0.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afddf7f520a80dbf76e6f50a35bca42a2331ef227a28b3b6dc5c2e2338d114b1" + +[[package]] +name = "async-trait" +version = "0.1.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d3a45e77e34375a7923b1e8febb049bb011f064714a8e17a1a616fef01da13d" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "autocfg" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" + +[[package]] +name = "base-x" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4521f3e3d031370679b3b140beb36dfe4801b09ac77e30c61941f97df3ef28b" + +[[package]] +name = "base64" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff" + +[[package]] +name = "base64" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" + +[[package]] +name = "bitflags" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" + +[[package]] +name = "block-buffer" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b" +dependencies = [ + "block-padding", + "byte-tools", + "byteorder", + "generic-array 0.12.4", +] + +[[package]] +name = "block-buffer" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" +dependencies = [ + "generic-array 0.14.4", +] + +[[package]] +name = "block-padding" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa79dedbb091f449f1f39e53edf88d5dbe95f895dae6135a8d7b881fb5af73f5" +dependencies = [ + "byte-tools", +] + +[[package]] +name = "buf-min" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "322a0cd4f908663608c3a814c405e40de60d58471b01b1169be39a4f9826e565" +dependencies = [ + "bytes 0.5.6", +] + +[[package]] +name = "bumpalo" +version = "3.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63396b8a4b9de3f4fdfb320ab6080762242f66a8ef174c49d8e19b674db4cdbe" + +[[package]] +name = "byte-tools" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" + +[[package]] +name = "byteorder" +version = "1.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" + +[[package]] +name = "bytes" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e4cec68f03f32e44924783795810fa50a7035d8c8ebe78580ad7e6c703fba38" + +[[package]] +name = "bytes" +version = "1.0.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b700ce4376041dcd0a327fd0097c41095743c4c8af8887265942faf1100bd040" + +[[package]] +name = "cc" +version = "1.0.67" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3c69b077ad434294d3ce9f1f6143a2a4b89a8a2d54ef813d85003a4fd1137fd" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "cmake" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb6210b637171dfba4cda12e579ac6dc73f5165ad56133e5d72ef3131f320855" +dependencies = [ + "cc", +] + +[[package]] +name = "const_fn" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28b9d6de7f49e22cf97ad17fc4036ece69300032f45f78f30b4a4482cdc3f4a6" + +[[package]] +name = "cookie" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffdf8865bac3d9a3bde5bde9088ca431b11f5d37c7a578b8086af77248b76627" +dependencies = [ + "percent-encoding", + "time 0.2.25", + "version_check 0.9.2", +] + +[[package]] +name = "cpuid-bool" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8aebca1129a03dc6dc2b127edd729435bbc4a37e1d5f4d7513165089ceb02634" + +[[package]] +name = "crypto-mac" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4857fd85a0c34b3c3297875b747c1e02e06b6a0ea32dd892d8192b9ce0813ea6" +dependencies = [ + "generic-array 0.14.4", + "subtle", +] + +[[package]] +name = "diesel" +version = "1.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e2de9deab977a153492a1468d1b1c0662c1cf39e5ea87d0c060ecd59ef18d8c" +dependencies = [ + "bitflags", + "byteorder", + "diesel_derives", + "pq-sys", + "r2d2", +] + +[[package]] +name = "diesel_derives" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45f5098f628d02a7a0f68ddba586fb61e80edec3bdc1be3b921f4ceec60858d3" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "digest" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5" +dependencies = [ + "generic-array 0.12.4", +] + +[[package]] +name = "digest" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" +dependencies = [ + "generic-array 0.14.4", +] + +[[package]] +name = "discard" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "212d0f5754cb6769937f4501cc0e67f4f4483c8d2c3e1e922ee9edbe4ab4c7c0" + +[[package]] +name = "display_bytes" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa333c37992bc9a2ca345cac27e7f150e97740a92daaeb488c98292d83329803" + +[[package]] +name = "double-checked-cell-async" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f22db4315075554331c4976a70d10d2d07192d1df01e9e68553bb632c3fa157" +dependencies = [ + "futures-util", + "unreachable", + "void", +] + +[[package]] +name = "fake-simd" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" + +[[package]] +name = "fallible-iterator" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "form_urlencoded" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fc25a87fa4fd2094bffb06925852034d90a17f0d1e05197d4956d3555752191" +dependencies = [ + "matches", + "percent-encoding", +] + +[[package]] +name = "fuchsia-cprng" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" + +[[package]] +name = "futures" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f55667319111d593ba876406af7c409c0ebb44dc4be6132a783ccf163ea14c1" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c2dd2df839b57db9ab69c2c9d8f3e8c81984781937fe2807dc6dcf3b2ad2939" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15496a72fabf0e62bdc3df11a59a3787429221dd0710ba8ef163d6f7a9112c94" + +[[package]] +name = "futures-executor" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "891a4b7b96d84d5940084b2a37632dd65deeae662c114ceaa2c879629c9c0ad1" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71c2c65c57704c32f5241c1223167c2c3294fd34ac020c807ddbe6db287ba59" + +[[package]] +name = "futures-macro" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea405816a5139fb39af82c2beb921d52143f556038378d6db21183a5c37fbfb7" +dependencies = [ + "proc-macro-hack", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "futures-sink" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85754d98985841b7d4f5e8e6fbfa4a4ac847916893ec511a2917ccd8525b8bb3" + +[[package]] +name = "futures-task" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa189ef211c15ee602667a6fcfe1c1fd9e07d42250d2156382820fba33c9df80" + +[[package]] +name = "futures-util" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1812c7ab8aedf8d6f2701a43e1243acdbcc2b36ab26e2ad421eb99ac963d96d1" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "proc-macro-hack", + "proc-macro-nested", + "slab", +] + +[[package]] +name = "generic-array" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ffdf9f34f1447443d37393cc6c2b8313aebddcd96906caf34e54c68d8e57d7bd" +dependencies = [ + "typenum", +] + +[[package]] +name = "generic-array" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "501466ecc8a30d1d3b7fc9229b122b2ce8ed6e9d9223f1138d4babb253e51817" +dependencies = [ + "typenum", + "version_check 0.9.2", +] + +[[package]] +name = "getrandom" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", +] + +[[package]] +name = "getrandom" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9495705279e7140bf035dde1f6e750c162df8b625267cd52cc44e0b156732c8" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.10.2+wasi-snapshot-preview1", +] + +[[package]] +name = "h2" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d832b01df74254fe364568d6ddc294443f61cbec82816b60904303af87efae78" +dependencies = [ + "bytes 1.0.1", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7afe4a420e3fe79967a00898cc1f4db7c8a49a9333a29f8a4bd76a253d5cd04" + +[[package]] +name = "headers" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62689dc57c7456e69712607ffcbd0aa1dfcccf9af73727e9b25bc1825375cac3" +dependencies = [ + "base64 0.13.0", + "bitflags", + "bytes 1.0.1", + "headers-core", + "http", + "mime", + "sha-1", + "time 0.1.43", +] + +[[package]] +name = "headers-core" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7f66481bfee273957b1f20485a4ff3362987f85b2c236580d81b4eb7a326429" +dependencies = [ + "http", +] + +[[package]] +name = "hermit-abi" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "322f4de77956e22ed0e5032c359a0f1273f1f7f0d79bfa3b8ffbc730d7fbcc5c" +dependencies = [ + "libc", +] + +[[package]] +name = "hmac" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1441c6b1e930e2817404b5046f1f989899143a12bf92de603b69f4e0aee1e15" +dependencies = [ + "crypto-mac", + "digest 0.9.0", +] + +[[package]] +name = "http" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7245cd7449cc792608c3c8a9eaf69bd4eabbabf802713748fd739c98b82f0747" +dependencies = [ + "bytes 1.0.1", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2861bd27ee074e5ee891e8b539837a9430012e249d7f0ca2d795650f579c1994" +dependencies = [ + "bytes 1.0.1", + "http", +] + +[[package]] +name = "httparse" +version = "1.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "615caabe2c3160b313d52ccc905335f4ed5f10881dd63dc5699d47e90be85691" + +[[package]] +name = "httpdate" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "494b4d60369511e7dea41cf646832512a94e542f68bb9c49e54518e0f468eb47" + +[[package]] +name = "hyper" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "e8e946c2b1349055e0b72ae281b238baf1a3ea7307c7e9f9d64673bdd9c26ac7" +dependencies = [ + "bytes 1.0.1", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project", + "socket2", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "indexmap" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb1fa934250de4de8aef298d81c729a7d33d8c239daa3a7575e6b92bfc7313b" +dependencies = [ + "autocfg", + "hashbrown", +] + +[[package]] +name = "instant" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61124eeebbd69b8190558df225adf7e4caafce0d743919e5d6b19652314ec5ec" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "itoa" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd25036021b0de88a0aff6b850051563c6516d0bf53f8638938edbb9de732736" + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "265d751d31d6780a3f956bb5b8022feba2d94eeee5a84ba64f4212eedca42213" + +[[package]] +name = "lock_api" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd96ffd135b2fd7b973ac026d28085defbe8983df057ced3eb4f2130b0831312" +dependencies = [ + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "markup" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9bee7fad13e6c55d2377028995cb110219efd1229258ccd1ab78252525c78397" +dependencies = [ + "itoa", + "markup-proc-macro", +] + +[[package]] +name = "markup-proc-macro" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7ea1b5715e5e17763fe26db0f8edea37be59639674a006dd0513806f7f2efe6" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "matches" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08" + +[[package]] +name = "md5" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" + +[[package]] +name = "memchr" +version = "2.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ee1c47aaa256ecabcaea351eae4a9b01ef39ed810004e298d2511ed284b1525" + +[[package]] +name = "mime" +version = "0.3.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d" + +[[package]] +name = "mime_guess" +version = "2.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2684d4c2e97d99848d30b324b00c8fcc7e5c897b7cbb5819b09e7c90e8baf212" +dependencies = [ + "mime", + "unicase", +] + +[[package]] +name = "mio" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a5dede4e2065b3842b8b0af444119f3aa331cc7cc2dd20388bfb0f5d5a38823a" +dependencies = [ + "libc", + "log", + "miow", + "ntapi", + "winapi", +] + +[[package]] +name = "miow" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a33c1b55807fbed163481b5ba66db4b2fa6cde694a5027be10fb724206c5897" +dependencies = [ + "socket2", + "winapi", +] + +[[package]] +name = "multimap" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1255076139a83bb467426e7f8d0134968a8118844faa755985e077cf31850333" +dependencies = [ + "serde", +] + +[[package]] +name = "nom" +version = "4.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ad2a91a8e869eeb30b9cb3119ae87773a8f4ae617f41b1eb9c154b2905f7bd6" +dependencies = [ + "memchr", + "version_check 0.1.5", +] + +[[package]] +name = "ntapi" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f6bb902e437b6d86e03cce10a7e2af662292c5dfef23b65899ea3ac9354ad44" +dependencies = [ + "winapi", +] + +[[package]] +name = "num_cpus" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "once_cell" +version = "1.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea78b9742c52ac729753c1590e9adc5248ea9bdaf974597efd46c74cfaa5fb54" + +[[package]] +name = "opaque-debug" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2839e79665f131bdb5782e51f2c6c9599c133c6098982a54c794358bf432529c" + +[[package]] +name = "opaque-debug" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" + +[[package]] +name = "parking_lot" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d7744ac029df22dca6284efe4e898991d28e3085c706c972bcd7da4a27a15eb" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa7a782938e745763fe6907fc6ba86946d72f49fe7e21de074e08128a99fb018" +dependencies = [ + "cfg-if", + "instant", + "libc", + "redox_syscall", + "smallvec", + "winapi", +] + +[[package]] +name = "percent-encoding" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" + +[[package]] +name = "phf" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3dfb61232e34fcb633f43d12c58f83c1df82962dcdfa565a4e866ffc17dafe12" +dependencies = [ + "phf_shared", +] + +[[package]] +name = "phf_shared" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c00cf8b9eafe68dde5e9eaa2cef8ee84a9336a47d566ec55ca16589633b65af7" +dependencies = [ + "siphasher", +] + +[[package]] +name = "pin-project" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96fa8ebb90271c4477f144354485b8068bd8f6b78b428b01ba892ca26caf0b63" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.0.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "758669ae3558c6f74bd2a18b41f7ac0b5a195aea6639d6a9b5e5d1ad5ba24c0b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cf491442e4b033ed1c722cb9f0df5fcfcf4de682466c46469c36bc47dc5548a" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "postgres-protocol" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70e34ad3dc5c56d036b9418185ee97e14b6766d55c8ccf9dc18302ad4e6371d9" +dependencies = [ + "base64 0.13.0", + "byteorder", + "bytes 1.0.1", + "fallible-iterator", + "hmac", + "md5", + "memchr", + "rand 0.8.3", + "sha2", + "stringprep", +] + +[[package]] +name = "postgres-types" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5493d9d4613b88b12433aa12890e74e74cd93fdc1e08b7c2aed4768aaae8414c" +dependencies = [ + "bytes 1.0.1", + "fallible-iterator", + "postgres-protocol", +] + +[[package]] +name = "ppv-lite86" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857" + +[[package]] +name = "pq-sys" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ac25eee5a0582f45a67e837e350d784e7003bd29a5f460796772061ca49ffda" +dependencies = [ + "vcpkg", +] + +[[package]] +name = "proc-macro-crate" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41fdbd1df62156fbc5945f4762632564d7d038153091c3fcf1067f6aef7cff92" +dependencies = [ + "thiserror", + "toml", +] + +[[package]] +name = "proc-macro-hack" +version = "0.5.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5" + +[[package]] +name = "proc-macro-nested" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc881b2c22681370c6a780e47af9840ef841837bc98118431d4e1868bd0c1086" + +[[package]] +name = "proc-macro2" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71" +dependencies = [ + "unicode-xid", +] + +[[package]] +name = "proc-quote" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e84ab161de78c915302ca325a19bee6df272800e2ae1a43fe3ef430bab2a100" +dependencies = [ + "proc-macro-hack", + "proc-macro2", + "proc-quote-impl", + "quote", + "syn", +] + +[[package]] +name = "proc-quote-impl" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fb3ec628b063cdbcf316e06a8b8c1a541d28fa6c0a8eacd2bfb2b7f49e88aa0" +dependencies = [ + "proc-macro-hack", + "proc-macro2", + "quote", +] + +[[package]] +name = "quote" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r2d2" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"545c5bc2b880973c9c10e4067418407a0ccaa3091781d1671d46eb35107cb26f" +dependencies = [ + "log", + "parking_lot", + "scheduled-thread-pool", +] + +[[package]] +name = "rand" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" +dependencies = [ + "fuchsia-cprng", + "libc", + "rand_core 0.3.1", + "rdrand", + "winapi", +] + +[[package]] +name = "rand" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" +dependencies = [ + "getrandom 0.1.16", + "libc", + "rand_chacha 0.2.2", + "rand_core 0.5.1", + "rand_hc 0.2.0", +] + +[[package]] +name = "rand" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ef9e7e66b4468674bfcb0c81af8b7fa0bb154fa9f28eb840da5c447baeb8d7e" +dependencies = [ + "libc", + "rand_chacha 0.3.0", + "rand_core 0.6.2", + "rand_hc 0.3.0", +] + +[[package]] +name = "rand_chacha" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" +dependencies = [ + "ppv-lite86", + "rand_core 0.5.1", +] + +[[package]] +name = "rand_chacha" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e12735cf05c9e10bf21534da50a147b924d555dc7a547c42e6bb2d5b6017ae0d" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.2", +] + +[[package]] +name = "rand_core" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" +dependencies = [ + "rand_core 0.4.2", +] + +[[package]] +name = "rand_core" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" + +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +dependencies = [ + "getrandom 0.1.16", +] + +[[package]] +name = "rand_core" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34cf66eb183df1c5876e2dcf6b13d57340741e8dc255b48e40a26de954d06ae7" +dependencies = [ + "getrandom 0.2.2", +] + +[[package]] +name = "rand_hc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +dependencies = [ + "rand_core 0.5.1", +] + +[[package]] +name = "rand_hc" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3190ef7066a446f2e7f42e239d161e905420ccab01eb967c9eb27d21b2322a73" +dependencies = [ + "rand_core 0.6.2", +] + +[[package]] +name = "random-fast-rng" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95784e84a571f44dc7fd4f0d3ab3f493d3c07b90d695450890490a89d83905a3" +dependencies = [ + "random-trait", +] + +[[package]] +name = "random-trait" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d3186af2e04abe646626095b37293d7485027e8591c56430dfda49894a28447" + +[[package]] +name = "rdrand" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "redox_syscall" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94341e4e44e24f6b591b59e47a8a027df12e008d73fd5672dbea9cc22f4507d9" +dependencies = [ + "bitflags", +] + +[[package]] +name = "regex" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9251239e129e16308e70d853559389de218ac275b515068abc96829d05b948a" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", + "thread_local", +] + +[[package]] +name = "regex-syntax" +version = "0.6.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5eb417147ba9860a96cfe72a0b93bf88fee1744b5636ec99ab20c1aa9376581" + +[[package]] +name = "remove_dir_all" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" +dependencies = [ + "winapi", +] + +[[package]] +name = "rustc_version" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" +dependencies = [ + "semver", +] + +[[package]] +name = "ryu" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e" + +[[package]] +name = "salvo" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5a6e19f341aaa7ae6ac7085308eee05058f071930a7b0e04f5c37fa9b92bff5" +dependencies = [ + "salvo_core", +] + +[[package]] +name = "salvo_core" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c908af873845b122095f6412c090514bd585a6d69e89ba4045c814148e00d039" +dependencies = [ + "anyhow", + "async-trait", + "base64 0.13.0", + "bitflags", + "bytes 1.0.1", + "cookie", + "display_bytes", + "double-checked-cell-async", + "form_urlencoded", + "futures", + "futures-util", + "headers", + "http", + "httparse", + "hyper", + "mime", + "mime_guess", + "multimap", + "num_cpus", + "once_cell", + "percent-encoding", + "pin-utils", + "rand 0.8.3", + "regex", + "salvo_macros", + "serde", + "serde_json", + "tempdir", + "textnonce", + "thiserror", + "time 0.2.25", + "tokio", + "tracing", + "tracing-futures", + "twoway", +] + +[[package]] +name = "salvo_macros" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9570eeda89fd2a69530b89be0abfd9672aec14692be9bf693410746e04a6bf18" +dependencies = [ + "proc-macro-crate", + "proc-macro2", + "proc-quote", + "syn", +] + +[[package]] +name = "salvo_test" +version = "0.6.5" +dependencies = [ + "anyhow", + "diesel", + "futures", + "hyper", + "markup", + "once_cell", + "rand 0.8.3", + "random-fast-rng", + "salvo", + "serde", + "serde_derive", + "serde_json", + "smallvec", + "snmalloc-rs", + "tokio", + "tokio-postgres", + "v_htmlescape", +] + +[[package]] +name = "scheduled-thread-pool" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc6f74fd1204073fa02d5d5d68bec8021be4c38690b61264b2fdb48083d0e7d7" +dependencies = [ + "parking_lot", +] + +[[package]] +name = "scopeguard" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" + 
+[[package]] +name = "semver" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" +dependencies = [ + "semver-parser", +] + +[[package]] +name = "semver-parser" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" + +[[package]] +name = "serde" +version = "1.0.125" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "558dc50e1a5a5fa7112ca2ce4effcb321b0300c0d4ccf0776a9f60cd89031171" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.125" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b093b7a2bb58203b5da3056c05b4ec1fed827dcfdb37347a8841695263b3d06d" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "799e97dc9fdae36a5c8b8f2cae9ce2ee9fdce2058c57a93e6099d919fd982f79" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "sha-1" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7d94d0bede923b3cea61f3f1ff57ff8cdfd77b400fb8f9998949e0cf04163df" +dependencies = [ + "block-buffer 0.7.3", + "digest 0.8.1", + "fake-simd", + "opaque-debug 0.2.3", +] + +[[package]] +name = "sha1" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2579985fda508104f7587689507983eadd6a6e84dd35d6d115361f530916fa0d" + +[[package]] +name = "sha2" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa827a14b29ab7f44778d14a88d3cb76e949c45083f7dbfa507d0cb699dc12de" +dependencies = [ + "block-buffer 0.9.0", + "cfg-if", + "cpuid-bool", + "digest 0.9.0", + "opaque-debug 0.3.0", +] + +[[package]] +name = "signal-hook-registry" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16f1d0fef1604ba8f7a073c7e701f213e056707210e9020af4528e0101ce11a6" +dependencies = [ + "libc", +] + +[[package]] +name = "siphasher" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa8f3741c7372e75519bd9346068370c9cdaabcc1f9599cbcf2a2719352286b7" + +[[package]] +name = "slab" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8" + +[[package]] +name = "smallvec" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe0f37c9e8f3c5a4a66ad655a93c74daac4ad00c441533bf5c6e7990bb42604e" + +[[package]] +name = "snmalloc-rs" +version = "0.2.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ef8b05b3ebb9f8ca192bcaee085f57dec601083e90f88bcaf84c2c05c5831a0" +dependencies = [ + "snmalloc-sys", +] + +[[package]] +name = "snmalloc-sys" +version = "0.2.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ae09a6102d7aa1f7263eefa5a75ea310fe352f9cc637e633b0e49d853cf8d0f" +dependencies = [ + "cmake", + "libc", +] + +[[package]] +name = "socket2" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "122e570113d28d773067fab24266b66753f6ea915758651696b6e35e49f88d6e" +dependencies = [ + "cfg-if", + "libc", + "winapi", 
+] + +[[package]] +name = "standback" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2beb4d1860a61f571530b3f855a1b538d0200f7871c63331ecd6f17b1f014f8" +dependencies = [ + "version_check 0.9.2", +] + +[[package]] +name = "stdweb" +version = "0.4.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d022496b16281348b52d0e30ae99e01a73d737b2f45d38fed4edf79f9325a1d5" +dependencies = [ + "discard", + "rustc_version", + "stdweb-derive", + "stdweb-internal-macros", + "stdweb-internal-runtime", + "wasm-bindgen", +] + +[[package]] +name = "stdweb-derive" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c87a60a40fccc84bef0652345bbbbbe20a605bf5d0ce81719fc476f5c03b50ef" +dependencies = [ + "proc-macro2", + "quote", + "serde", + "serde_derive", + "syn", +] + +[[package]] +name = "stdweb-internal-macros" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58fa5ff6ad0d98d1ffa8cb115892b6e69d67799f6763e162a1c9db421dc22e11" +dependencies = [ + "base-x", + "proc-macro2", + "quote", + "serde", + "serde_derive", + "serde_json", + "sha1", + "syn", +] + +[[package]] +name = "stdweb-internal-runtime" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "213701ba3370744dcd1a12960caa4843b3d68b4d1c0a5d575e0d65b2ee9d16c0" + +[[package]] +name = "stringprep" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ee348cb74b87454fff4b551cbf727025810a004f88aeacae7f85b87f4e9a1c1" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "subtle" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e81da0851ada1f3e9d4312c704aa4f8806f0f9d69faaf8df2f3464b4a9437c2" + +[[package]] +name = "syn" +version = "1.0.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c700597eca8a5a762beb35753ef6b94df201c81cca676604f547495a0d7f0081" +dependencies = [ + "proc-macro2", + "quote", + "unicode-xid", +] + +[[package]] +name = "tempdir" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15f2b5fb00ccdf689e0149d1b1b3c03fead81c2b37735d812fa8bddbbf41b6d8" +dependencies = [ + "rand 0.4.6", + "remove_dir_all", +] + +[[package]] +name = "textnonce" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7743f8d70cd784ed1dc33106a18998d77758d281dc40dc3e6d050cf0f5286683" +dependencies = [ + "base64 0.12.3", + "rand 0.7.3", +] + +[[package]] +name = "thiserror" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0f4a65597094d4483ddaed134f409b2cb7c1beccf25201a9f73c719254fa98e" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7765189610d8241a44529806d6fd1f2e0a08734313a35d5b3a556f92b381f3c0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thread_local" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8018d24e04c95ac8790716a5987d0fec4f8b27249ffa0f7d33f1369bdfb88cbd" +dependencies = [ + "once_cell", +] + +[[package]] +name = "time" +version = "0.1.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "time" +version = "0.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1195b046942c221454c2539395f85413b33383a067449d78aab2b7b052a142f7" +dependencies = [ + "const_fn", + "libc", + "standback", + "stdweb", + "time-macros", + "version_check 0.9.2", + "winapi", +] + +[[package]] +name = "time-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "957e9c6e26f12cb6d0dd7fc776bb67a706312e7299aed74c8dd5b17ebb27e2f1" +dependencies = [ + "proc-macro-hack", + "time-macros-impl", +] + +[[package]] +name = "time-macros-impl" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5c3be1edfad6027c69f5491cf4cb310d1a71ecd6af742788c6ff8bced86b8fa" +dependencies = [ + "proc-macro-hack", + "proc-macro2", + "quote", + "standback", + "syn", +] + +[[package]] +name = "tinyvec" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "317cca572a0e89c3ce0ca1f1bdc9369547fe318a683418e42ac8f59d14701023" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" + +[[package]] +name = "tokio" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8190d04c665ea9e6b6a0dc45523ade572c088d2e6566244c1122671dbf4ae3a" +dependencies = [ + "autocfg", + "bytes 1.0.1", + "libc", + "memchr", + "mio", + "num_cpus", + "once_cell", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "tokio-macros", + "winapi", +] + +[[package]] +name = "tokio-macros" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "caf7b11a536f46a809a8a9f0bb4237020f70ecbf115b842360afb127ea2fda57" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tokio-postgres" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cc9f82c2bfb06a33dd0dfb44b07ca98fe72df19e681d80c78d05a1bac2138e2" +dependencies = [ + "async-trait", + "byteorder", + "bytes 1.0.1", + "fallible-iterator", + "futures", + "log", + "parking_lot", + "percent-encoding", + "phf", + "pin-project-lite", + "postgres-protocol", + "postgres-types", + "socket2", + "tokio", + "tokio-util", +] + +[[package]] +name = "tokio-util" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebb7cb2f00c5ae8df755b252306272cd1790d39728363936e01827e11f0b017b" +dependencies = [ + "bytes 1.0.1", + "futures-core", + "futures-sink", + "log", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "toml" +version = "0.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a31142970826733df8241ef35dc040ef98c679ab14d7c3e54d827099b3acecaa" +dependencies = [ + "serde", +] + +[[package]] +name = "tower-service" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6" + +[[package]] +name = "tracing" +version = "0.1.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01ebdc2bb4498ab1ab5f5b73c5803825e60199229ccba0698170e3be0e7f959f" +dependencies = [ + "cfg-if", + 
"pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8a9bd1db7706f2373a190b0d067146caa39350c486f3d455b0e33b431f94c07" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f50de3927f93d202783f4513cda820ab47ef17f624b03c096e86ef00c67e6b5f" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "tracing-futures" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" +dependencies = [ + "pin-project", + "tracing", +] + +[[package]] +name = "try-lock" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642" + +[[package]] +name = "twoway" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b40075910de3a912adbd80b5d8bad6ad10a23eeb1f5bf9d4006839e899ba5bc" +dependencies = [ + "memchr", + "unchecked-index", +] + +[[package]] +name = "typenum" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33" + +[[package]] +name = "unchecked-index" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eeba86d422ce181a719445e51872fa30f1f7413b62becb52e95ec91aa262d85c" + +[[package]] +name = "unicase" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" +dependencies = [ + "version_check 0.9.2", +] + +[[package]] +name = "unicode-bidi" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" +dependencies = [ + "matches", +] + +[[package]] +name = "unicode-normalization" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07fbfce1c8a97d547e8b5334978438d9d6ec8c20e38f56d4a4374d181493eaef" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-xid" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564" + +[[package]] +name = "unreachable" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56" +dependencies = [ + "void", +] + +[[package]] +name = "v_escape" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b589fdb24627ef8a81f26da77350ebcb0494547bfe074cda878aff9941edb13" +dependencies = [ + "buf-min", + "v_escape_derive", +] + +[[package]] +name = "v_escape_derive" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c860ad1273f4eee7006cee05db20c9e60e5d24cba024a32e1094aa8e574f3668" +dependencies = [ + "nom", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "v_htmlescape" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f6c364b6fad8be5fd1c01993c4c7217491e2822edec75817ae2c3ac9f3926c82" +dependencies = [ + "cfg-if", + "v_escape", +] + +[[package]] +name = "vcpkg" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b00bca6106a5e23f3eee943593759b7fcddb00554332e856d990c893966879fb" + +[[package]] +name = "version_check" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd" + +[[package]] +name = "version_check" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed" + +[[package]] +name = "void" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" + +[[package]] +name = "want" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" +dependencies = [ + "log", + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + +[[package]] +name = "wasi" +version = "0.10.2+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" + +[[package]] +name = "wasm-bindgen" +version = "0.2.71" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ee1280240b7c461d6a0071313e08f34a60b0365f14260362e5a2b17d1d31aa7" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.71" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b7d8b6942b8bb3a9b0e73fc79b98095a27de6fa247615e59d096754a3bc2aa8" +dependencies = [ + "bumpalo", + "lazy_static", + "log", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.71" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5ac38da8ef716661f0f36c0d8320b89028efe10c7c0afde65baffb496ce0d3b" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.71" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc053ec74d454df287b9374ee8abb36ffd5acb95ba87da3ba5b7d3fe20eb401e" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.71" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d6f8ec44822dd71f5f221a5847fb34acd9060535c1211b70a05844c0f6383b1" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = 
"0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff --git a/frameworks/Rust/salvo/Cargo.toml b/frameworks/Rust/salvo/Cargo.toml index b653e2bf7c6..9bc401f4401 100644 --- a/frameworks/Rust/salvo/Cargo.toml +++ b/frameworks/Rust/salvo/Cargo.toml @@ -17,30 +17,24 @@ path = "src/main_pg.rs" [dependencies] anyhow = "1.0.38" -askama = "0.8" -diesel = { version = "1.4.3", features = ["postgres", "r2d2"] } -salvo = { version = "0.7.0", features = ["anyhow"] } -tokio = { version = "1.2", features = ["full"] } +diesel = { version = "1.4", features = ["postgres", "r2d2"] } +salvo = { version = "0.11", features = ["anyhow"] } +tokio = { version = "1", features = ["full"] } snmalloc-rs = { version = "0.2.24", features = ["1mib", "native-cpu"] } random-fast-rng = "0.1.1" -futures = "0.3.12" +futures = "0.3" smallvec = "1.6.1" -simd-json = "0.3" -simd-json-derive = "0.1.15" serde = { version = "1.0", features = ["derive"] } tokio-postgres = "0.7" -yarte = { version = "0.15", features = ["bytes-buf"] } once_cell = "1.5.2" rand = { version = "0.8.3", features = ["small_rng"] } -hyper = "0.14.4" +hyper = "0.14" +markup = "0.12" +v_htmlescape = "0.13.0" +serde_json = "1.0.64" +serde_derive = "1.0.125" [profile.release] lto = true opt-level = 3 codegen-units = 1 - -# [patch.crates-io] -# salvo = { path = "D:/Kenorld/salvo-rs/salvo" } -# salvo_core = { path = "D:/Kenorld/salvo-rs/salvo/core" } -# salvo_macros = { path = "D:/Kenorld/salvo-rs/salvo/macros" } -# salvo_extra = { path = "D:/Kenorld/salvo-rs/salvo/extra" } diff --git a/frameworks/Rust/salvo/benchmark_config.json b/frameworks/Rust/salvo/benchmark_config.json index 592821a4967..4f840db0321 100644 --- a/frameworks/Rust/salvo/benchmark_config.json +++ b/frameworks/Rust/salvo/benchmark_config.json @@ -13,8 +13,8 @@ "language": "Rust", "flavor": "None", "orm": "Micro", - "platform": "None", - "webserver": "None", + "platform": "Rust", + "webserver": "Hyper", "os": "Linux", "database_os": "Linux", "display_name": "Salvo", @@ -33,8 +33,8 @@ "framework": "salvo", "language": "Rust", "orm": "Raw", - "platform": "None", - "webserver": "None", + "platform": "Rust", + "webserver": "Hyper", "os": "Linux", "database_os": "Linux", "display_name": "Salvo [Diesel]", @@ -53,8 +53,8 @@ "framework": "salvo", "language": "Rust", "orm": "Raw", - "platform": "None", - "webserver": "None", + "platform": "Rust", + "webserver": "Hyper", "os": "Linux", "database_os": "Linux", "display_name": "Salvo [PG]", diff --git a/frameworks/Rust/salvo/salvo-db.dockerfile b/frameworks/Rust/salvo/salvo-db.dockerfile index a2c63b5a792..d6c2b236e74 100644 --- a/frameworks/Rust/salvo/salvo-db.dockerfile +++ b/frameworks/Rust/salvo/salvo-db.dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.50 +FROM rust:1.51 RUN apt-get update -yqq && apt-get install -yqq cmake g++ diff --git a/frameworks/Rust/salvo/salvo-pg.dockerfile b/frameworks/Rust/salvo/salvo-pg.dockerfile index 64b10a08653..6013f61247a 100644 --- a/frameworks/Rust/salvo/salvo-pg.dockerfile +++ b/frameworks/Rust/salvo/salvo-pg.dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.50 +FROM rust:1.51 RUN apt-get update -yqq && apt-get install -yqq cmake g++ diff --git a/frameworks/Rust/salvo/salvo.dockerfile b/frameworks/Rust/salvo/salvo.dockerfile index 9be27fa3807..82e24d685b5 100644 --- a/frameworks/Rust/salvo/salvo.dockerfile +++ b/frameworks/Rust/salvo/salvo.dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.50 +FROM rust:1.51 # Disable simd at jsonescape ENV 
CARGO_CFG_JSONESCAPE_DISABLE_AUTO_SIMD= diff --git a/frameworks/Rust/salvo/src/main.rs b/frameworks/Rust/salvo/src/main.rs index 63a720a4e5d..c2c1fe7bef9 100644 --- a/frameworks/Rust/salvo/src/main.rs +++ b/frameworks/Rust/salvo/src/main.rs @@ -1,9 +1,12 @@ #[global_allocator] static ALLOC: snmalloc_rs::SnMalloc = snmalloc_rs::SnMalloc; +#[macro_use] +extern crate serde_derive; +extern crate serde_json; + use salvo::http::header::{self, HeaderValue}; use salvo::prelude::*; -use simd_json_derive::Serialize; use hyper::server::conn::AddrIncoming; static HELLO_WORLD: &'static [u8] = b"Hello, world!"; @@ -15,10 +18,9 @@ pub struct Message { #[fn_handler] async fn json(res: &mut Response) { res.headers_mut().insert(header::SERVER, HeaderValue::from_static("S")); - let msg = Message { + res.render_json(&Message { message: "Hello, World!", - }; - res.render_binary(HeaderValue::from_static("application/json"), &msg.json_vec().unwrap()); + }); } #[fn_handler] @@ -37,5 +39,5 @@ async fn main() { let mut incoming = AddrIncoming::bind(&(([0, 0, 0, 0], 8080)).into()).unwrap(); incoming.set_nodelay(true); - Server::builder(incoming).http1_pipeline_flush(true).serve(Service::new(router)).await.unwrap(); + salvo::server::builder(incoming).http1_pipeline_flush(true).serve(Service::new(router)).await.unwrap(); } diff --git a/frameworks/Rust/salvo/src/main_db.rs b/frameworks/Rust/salvo/src/main_db.rs index 32393a0ac22..f03b5326966 100644 --- a/frameworks/Rust/salvo/src/main_db.rs +++ b/frameworks/Rust/salvo/src/main_db.rs @@ -5,7 +5,6 @@ static ALLOC: snmalloc_rs::SnMalloc = snmalloc_rs::SnMalloc; extern crate diesel; use anyhow::Error; -use askama::Template; use diesel::prelude::*; use diesel::r2d2::{ConnectionManager, Pool, PoolError, PooledConnection}; use once_cell::sync::OnceCell; @@ -13,7 +12,8 @@ use rand::rngs::SmallRng; use rand::{Rng, SeedableRng}; use salvo::http::header::{self, HeaderValue}; use salvo::prelude::*; -use std::{cmp, io}; +use std::cmp; +use std::fmt::Write; mod models; mod schema; @@ -91,31 +91,47 @@ async fn updates(req: &mut Request, res: &mut Response) -> Result<(), Error> { Ok(()) } -#[derive(Template)] -#[template(path = "fortune.html")] -struct FortuneTemplate<'a> { - items: &'a Vec<Fortune>, -} - #[fn_handler] async fn fortunes(_req: &mut Request, res: &mut Response) -> Result<(), Error> { + let mut items = vec![Fortune { + id: 0, + message: "Additional fortune added at request time.".to_string(), + }]; + let conn = connect()?; - let rows = match fortune::table.get_results::<Fortune>(&conn) { - Ok(mut items) => { - items.push(Fortune { - id: 0, - message: "Additional fortune added at request time.".to_string(), - }); - items.sort_by(|it, next| it.message.cmp(&next.message)); - Ok(items) - } - Err(e) => Err(io::Error::new(io::ErrorKind::Other, e)), - }?; - let tmpl = FortuneTemplate { items: &rows }; + for item in fortune::table.get_results::<Fortune>(&conn)? { + items.push(item); + } + items.sort_by(|it, next| it.message.cmp(&next.message)); + + let mut body = String::new(); + write!(&mut body, "{}", FortunesTemplate { items }).unwrap(); + res.headers_mut().insert(header::SERVER, HeaderValue::from_static("S")); - res.render_html_text(&tmpl.render().unwrap()); + res.render_html_text(&body); Ok(()) } +markup::define! 
{ + FortunesTemplate(items: Vec<Fortune>) { + {markup::doctype()} + html { + head { + title { "Fortunes" } + } + body { + table { + tr { th { "id" } th { "message" } } + @for item in items { + tr { + td { {item.id} } + td { {markup::raw(v_htmlescape::escape(&item.message).to_string())} } + } + } + } + } + } + } +} #[tokio::main] async fn main() { diff --git a/frameworks/Rust/salvo/src/main_pg.rs b/frameworks/Rust/salvo/src/main_pg.rs index d535888a608..6abcb1c16cf 100644 --- a/frameworks/Rust/salvo/src/main_pg.rs +++ b/frameworks/Rust/salvo/src/main_pg.rs @@ -1,21 +1,18 @@ #[global_allocator] static ALLOC: snmalloc_rs::SnMalloc = snmalloc_rs::SnMalloc; +use std::collections::HashMap; +use std::fmt::Write; +use std::{cmp, io}; + use anyhow::Error; -use futures::{pin_mut, TryStreamExt}; use once_cell::sync::OnceCell; use rand::rngs::SmallRng; use rand::{Rng, SeedableRng}; use salvo::http::header::{self, HeaderValue}; use salvo::prelude::*; -use serde::Serialize; -use std::borrow::Cow; -use std::collections::HashMap; -use std::fmt::Write; -use std::{cmp, io}; use tokio_postgres::types::ToSql; use tokio_postgres::{self, Client, NoTls, Statement}; -use yarte::ywrite_html; mod models; use models::*; @@ -32,12 +29,6 @@ pub struct PgConnection { updates: HashMap<u16, Statement>, } -#[derive(Serialize, Debug)] -pub struct CowFortune { - pub id: i32, - pub message: Cow<'static, str>, -} - impl PgConnection { pub async fn connect(db_url: &str) -> Result { let (client, conn) = tokio_postgres::connect(db_url, NoTls) @@ -49,7 +40,7 @@ impl PgConnection { } }); - let fortune = client.prepare("SELECT * FROM fortune").await.unwrap(); + let fortune = client.prepare("SELECT id, message FROM fortune").await.unwrap(); let world = client.prepare("SELECT * FROM world WHERE id=$1").await.unwrap(); let mut updates_statements = HashMap::new(); for num in 1..=500u16 { @@ -152,43 +143,53 @@ async fn updates(req: &mut Request, res: &mut Response) -> Result<(), Error> { #[fn_handler] async fn fortunes(_req: &mut Request, res: &mut Response) -> Result<(), Error> { - let mut items = vec![CowFortune { - id: 0, - message: Cow::Borrowed("Additional fortune added at request time."), - }]; let conn = connect(); - let params: [&str; 0] = []; - let stream = conn.client.query_raw(&conn.fortune, &params).await?; - pin_mut!(stream); - - while let Some(row) = stream.try_next().await? { - items.push(CowFortune { - id: row.get(0), - message: Cow::Owned(row.get(1)), - }); - } - + let mut items = conn.client.query(&conn.fortune, &[]).await?.iter().map(|row|Fortune { + id: row.get(0), + message: row.get(1), + }).collect::<Vec<_>>(); + items.push(Fortune { + id: 0, + message: "Additional fortune added at request time.".to_string(), + }); items.sort_by(|it, next| it.message.cmp(&next.message)); - - let mut body = Vec::with_capacity(2048); - ywrite_html!(body, "{{> fortune }}"); + let mut body = String::with_capacity(2048); + write!(&mut body, "{}", FortunesTemplate { items }).unwrap(); res.headers_mut().insert(header::SERVER, HeaderValue::from_static("S")); - res.render_binary("text/html; charset=utf-8".parse().unwrap(), &body); + res.render_html_text(&body); Ok(()) } +markup::define! 
{ + FortunesTemplate(items: Vec<Fortune>) { + {markup::doctype()} + html { + head { + title { "Fortunes" } + } + body { + table { + tr { th { "id" } th { "message" } } + @for item in items { + tr { + td { {item.id} } + td { {markup::raw(v_htmlescape::escape(&item.message).to_string())} } + } + } + } + } + } + } +} #[tokio::main] async fn main() { println!("Started http server: 127.0.0.1:8080"); - DB_CONN - .set( - PgConnection::connect(DB_URL) - .await - .expect(&format!("Error connecting to {}", &DB_URL)), - ) - .ok(); + let conn = PgConnection::connect(DB_URL) + .await + .expect(&format!("Error connecting to {}", &DB_URL)); + DB_CONN.set(conn).ok(); let router = Router::new() .push(Router::new().path("db").get(world_row)) .push(Router::new().path("fortunes").get(fortunes)) diff --git a/frameworks/Rust/salvo/templates/fortune.html b/frameworks/Rust/salvo/templates/fortune.html deleted file mode 100644 index 7c448b9d999..00000000000 --- a/frameworks/Rust/salvo/templates/fortune.html +++ /dev/null @@ -1,12 +0,0 @@ - - - Fortunes - - - - {% for item in items %} - - {% endfor %} -
-<tr><th>id</th><th>message</th></tr>
-<tr><td>{{item.id}}</td><td>{{item.message}}</td></tr>
- - diff --git a/frameworks/Scala/blaze/README.md b/frameworks/Scala/blaze/README.md index 27cce777bdf..9298ce228cf 100644 --- a/frameworks/Scala/blaze/README.md +++ b/frameworks/Scala/blaze/README.md @@ -1,11 +1,5 @@ #blaze Benchmarking Test -## Infrastructure Software Versions -The tests were run with: - -* [Java Oracle 1.8.0](http://www.oracle.com/technetwork/java/javase) -* [blaze 0.14.0-M3](https://github.com/http4s/blaze/) - ## Test URLs ### JSON Encoding Test diff --git a/frameworks/Scala/blaze/blaze.dockerfile b/frameworks/Scala/blaze/blaze.dockerfile index baab66d3e9d..23a039acb7b 100644 --- a/frameworks/Scala/blaze/blaze.dockerfile +++ b/frameworks/Scala/blaze/blaze.dockerfile @@ -1,12 +1,11 @@ -FROM adoptopenjdk/openjdk11:x86_64-alpine-jre-11.0.3_7 -RUN apk add bash +FROM openjdk:15 WORKDIR /blaze COPY project project COPY src src COPY build.sbt build.sbt COPY sbt sbt RUN ./sbt assembly -batch && \ - mv target/scala-2.12/blaze-assembly-1.0.jar . && \ + mv target/blaze-assembly-1.0.jar . && \ rm -Rf target && \ rm -Rf project/target && \ rm -Rf ~/.sbt && \ diff --git a/frameworks/Scala/blaze/build.sbt b/frameworks/Scala/blaze/build.sbt index 9f75d777635..7b0cd7ed2cc 100644 --- a/frameworks/Scala/blaze/build.sbt +++ b/frameworks/Scala/blaze/build.sbt @@ -2,10 +2,12 @@ name := "blaze" version := "1.0" -scalaVersion := "2.12.8" +scalaVersion := "2.13.5" libraryDependencies ++= Seq( - "org.http4s" %% "http4s-blaze-core" % "0.20.3", - "com.github.plokhotnyuk.jsoniter-scala" %% "jsoniter-scala-macros" % "0.51.3", + "org.http4s" %% "blaze-http" % "0.14.16", + "com.github.plokhotnyuk.jsoniter-scala" %% "jsoniter-scala-macros" % "2.7.3", "ch.qos.logback" % "logback-classic" % "1.2.3" ) + +crossPaths := false \ No newline at end of file diff --git a/frameworks/Scala/blaze/project/build.properties b/frameworks/Scala/blaze/project/build.properties index c0bab04941d..f0be67b9f72 100644 --- a/frameworks/Scala/blaze/project/build.properties +++ b/frameworks/Scala/blaze/project/build.properties @@ -1 +1 @@ -sbt.version=1.2.8 +sbt.version=1.5.1 diff --git a/frameworks/Scala/blaze/sbt b/frameworks/Scala/blaze/sbt index 5475171bc5f..8cc9bada4e1 100755 --- a/frameworks/Scala/blaze/sbt +++ b/frameworks/Scala/blaze/sbt @@ -18,10 +18,10 @@ declare -r latest_28="2.8.2" declare -r buildProps="project/build.properties" -declare -r sbt_launch_ivy_release_repo="http://repo.typesafe.com/typesafe/ivy-releases" +declare -r sbt_launch_ivy_release_repo="https://repo.typesafe.com/typesafe/ivy-releases" declare -r sbt_launch_ivy_snapshot_repo="https://repo.scala-sbt.org/scalasbt/ivy-snapshots" -declare -r sbt_launch_mvn_release_repo="http://repo.scala-sbt.org/scalasbt/maven-releases" -declare -r sbt_launch_mvn_snapshot_repo="http://repo.scala-sbt.org/scalasbt/maven-snapshots" +declare -r sbt_launch_mvn_release_repo="https://repo.scala-sbt.org/scalasbt/maven-releases" +declare -r sbt_launch_mvn_snapshot_repo="https://repo.scala-sbt.org/scalasbt/maven-snapshots" declare -r default_jvm_opts_common="-Xms512m -Xss2m" declare -r noshare_opts="-Dsbt.global.base=project/.sbtboot -Dsbt.boot.directory=project/.boot -Dsbt.ivy.home=project/.ivy" @@ -133,6 +133,7 @@ make_url () { 0.10.* ) echo "$base/org.scala-tools.sbt/sbt-launch/$version/sbt-launch.jar" ;; 0.11.[12]) echo "$base/org.scala-tools.sbt/sbt-launch/$version/sbt-launch.jar" ;; 0.*) echo "$base/org.scala-sbt/sbt-launch/$version/sbt-launch.jar" ;; + 1.5.*) echo "$base/org/scala-sbt/sbt-launch/$version/sbt-launch-$version.jar" ;; *) echo 
"$base/org/scala-sbt/sbt-launch/$version/sbt-launch.jar" ;; esac } diff --git a/frameworks/Scala/blaze/src/main/scala/Main.scala b/frameworks/Scala/blaze/src/main/scala/Main.scala index 9fc709d6880..e062443ea2b 100644 --- a/frameworks/Scala/blaze/src/main/scala/Main.scala +++ b/frameworks/Scala/blaze/src/main/scala/Main.scala @@ -23,7 +23,7 @@ object Main { private val jsonHeaders = Seq("server" -> "blaze", "content-type" -> "application/json") private val plaintextHeaders = Seq("server" -> "blaze", "content-type" -> "text/plain") - private implicit val codec: JsonValueCodec[Message] = JsonCodecMaker.make[Message](CodecMakerConfig()) + private implicit val codec: JsonValueCodec[Message] = JsonCodecMaker.make[Message](CodecMakerConfig) def serve(request: HttpRequest): Future[RouteAction] = Future.successful { request.url match { @@ -37,7 +37,7 @@ object Main { Future.successful(LeafBuilder(new Http1ServerStage(serve, config))) def main(args: Array[String]): Unit = - NIO1SocketServerGroup.fixedGroup() + NIO1SocketServerGroup.fixed() .bind(new InetSocketAddress(8080), connect) .getOrElse(sys.error("Failed to start server.")) .join() diff --git a/frameworks/Scala/finagle/build.sbt b/frameworks/Scala/finagle/build.sbt index 14d960f2eac..ca4d3cee2ed 100644 --- a/frameworks/Scala/finagle/build.sbt +++ b/frameworks/Scala/finagle/build.sbt @@ -1,4 +1,4 @@ -lazy val finagleVersion = "21.3.0" +lazy val finagleVersion = "21.4.0" name := "finagle-benchmark" scalaVersion := "2.12.12" diff --git a/frameworks/Scala/finatra/build.sbt b/frameworks/Scala/finatra/build.sbt index bc5f9c44d27..52f5fb6b807 100644 --- a/frameworks/Scala/finatra/build.sbt +++ b/frameworks/Scala/finatra/build.sbt @@ -1,4 +1,4 @@ -lazy val finatraVersion = "21.3.0" +lazy val finatraVersion = "21.4.0" name := "techempower-benchmarks-finatra" organization := "com.twitter" diff --git a/frameworks/Scala/http4s/build.sbt b/frameworks/Scala/http4s/build.sbt index 22b6ddcfba9..321a80fde88 100644 --- a/frameworks/Scala/http4s/build.sbt +++ b/frameworks/Scala/http4s/build.sbt @@ -2,7 +2,7 @@ name := "http4s" version := "1.0" -scalaVersion := "2.13.3" +scalaVersion := "2.13.5" scalacOptions ++= Seq( "-deprecation", @@ -18,11 +18,11 @@ scalacOptions ++= Seq( enablePlugins(SbtTwirl) -val http4sVersion = "0.21.7" +val http4sVersion = "0.21.22" -assemblyMergeStrategy in assembly := { +assembly / assemblyMergeStrategy := { case PathList(xs @ _*) if xs.last == "io.netty.versions.properties" => MergeStrategy.rename - case other => (assemblyMergeStrategy in assembly).value(other) + case other => (assembly / assemblyMergeStrategy).value(other) } libraryDependencies ++= Seq( @@ -32,11 +32,11 @@ libraryDependencies ++= Seq( "org.http4s" %% "http4s-circe" % http4sVersion, // Optional for auto-derivation of JSON codecs "io.circe" %% "circe-generic" % "0.13.0", - "org.typelevel" %% "cats-effect" % "2.2.0", - "co.fs2" %% "fs2-core" % "2.4.4", - "co.fs2" %% "fs2-io" % "2.4.4", - "io.getquill" %% "quill-jasync-postgres" % "3.5.2", - "io.getquill" %% "quill-jasync" % "3.5.2", + "org.typelevel" %% "cats-effect" % "2.5.0", + "co.fs2" %% "fs2-core" % "2.5.5", + "co.fs2" %% "fs2-io" % "2.5.5", + "io.getquill" %% "quill-jasync-postgres" % "3.7.0", + "io.getquill" %% "quill-jasync" % "3.7.0", "ch.qos.logback" % "logback-classic" % "1.2.3" ) diff --git a/frameworks/Scala/http4s/project/build.properties b/frameworks/Scala/http4s/project/build.properties index 0837f7a132d..f0be67b9f72 100644 --- a/frameworks/Scala/http4s/project/build.properties +++ 
b/frameworks/Scala/http4s/project/build.properties @@ -1 +1 @@ -sbt.version=1.3.13 +sbt.version=1.5.1 diff --git a/frameworks/Scala/http4s/project/plugins.sbt b/frameworks/Scala/http4s/project/plugins.sbt index fc68c19a6ac..8a0f389d2cd 100644 --- a/frameworks/Scala/http4s/project/plugins.sbt +++ b/frameworks/Scala/http4s/project/plugins.sbt @@ -1,2 +1,2 @@ addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.9") -addSbtPlugin("com.typesafe.sbt" % "sbt-twirl" % "1.5.0") +addSbtPlugin("com.typesafe.sbt" % "sbt-twirl" % "1.5.1") diff --git a/frameworks/Scala/http4s/sbt b/frameworks/Scala/http4s/sbt index d4e9c70cfd2..8cc9bada4e1 100755 --- a/frameworks/Scala/http4s/sbt +++ b/frameworks/Scala/http4s/sbt @@ -133,6 +133,7 @@ make_url () { 0.10.* ) echo "$base/org.scala-tools.sbt/sbt-launch/$version/sbt-launch.jar" ;; 0.11.[12]) echo "$base/org.scala-tools.sbt/sbt-launch/$version/sbt-launch.jar" ;; 0.*) echo "$base/org.scala-sbt/sbt-launch/$version/sbt-launch.jar" ;; + 1.5.*) echo "$base/org/scala-sbt/sbt-launch/$version/sbt-launch-$version.jar" ;; *) echo "$base/org/scala-sbt/sbt-launch/$version/sbt-launch.jar" ;; esac } diff --git a/frameworks/Swift/hummingbird/.gitignore b/frameworks/Swift/hummingbird/.gitignore new file mode 100644 index 00000000000..1d982c0de86 --- /dev/null +++ b/frameworks/Swift/hummingbird/.gitignore @@ -0,0 +1,7 @@ +.DS_Store +.build/ +/*.xcodeproj +xcuserdata/ +.swiftpm/ +DerivedData/ +Package.resolved diff --git a/frameworks/Swift/hummingbird/hummingbird-postgres.dockerfile b/frameworks/Swift/hummingbird/hummingbird-postgres.dockerfile index 7daa6302343..8559390bc90 100644 --- a/frameworks/Swift/hummingbird/hummingbird-postgres.dockerfile +++ b/frameworks/Swift/hummingbird/hummingbird-postgres.dockerfile @@ -10,7 +10,8 @@ COPY ./src-postgres . # Compile with optimizations RUN swift build \ --enable-test-discovery \ - -c release + -c release \ + -Xswiftc -enforce-exclusivity=unchecked # ================================ # Run image diff --git a/frameworks/Swift/hummingbird/hummingbird.dockerfile b/frameworks/Swift/hummingbird/hummingbird.dockerfile index a4d76350078..5e156d2331b 100644 --- a/frameworks/Swift/hummingbird/hummingbird.dockerfile +++ b/frameworks/Swift/hummingbird/hummingbird.dockerfile @@ -10,7 +10,8 @@ COPY ./src . 
# Compile with optimizations RUN swift build \ --enable-test-discovery \ - -c release + -c release \ + -Xswiftc -enforce-exclusivity=unchecked # ================================ # Run image diff --git a/frameworks/Swift/hummingbird/src-postgres/Package.swift b/frameworks/Swift/hummingbird/src-postgres/Package.swift index 67930f419c9..8eec7f2ece9 100644 --- a/frameworks/Swift/hummingbird/src-postgres/Package.swift +++ b/frameworks/Swift/hummingbird/src-postgres/Package.swift @@ -10,8 +10,8 @@ let package = Package( .executable(name: "server", targets: ["server"]) ], dependencies: [ - .package(url: "https://github.com/hummingbird-project/hummingbird.git", .upToNextMinor(from: "0.7.0")), - .package(url: "https://github.com/hummingbird-project/hummingbird-mustache.git", .upToNextMinor(from: "0.1.0")), + .package(url: "https://github.com/hummingbird-project/hummingbird.git", .upToNextMinor(from: "0.11.0")), + .package(url: "https://github.com/hummingbird-project/hummingbird-mustache.git", .upToNextMinor(from: "0.5.0")), .package(url: "https://github.com/vapor/postgres-kit.git", from: "2.0.0"), ], targets: [ diff --git a/frameworks/Swift/hummingbird/src-postgres/Sources/server/Controllers/FortunesController.swift b/frameworks/Swift/hummingbird/src-postgres/Sources/server/Controllers/FortunesController.swift index 5af9fe6a675..9388bf509ce 100644 --- a/frameworks/Swift/hummingbird/src-postgres/Sources/server/Controllers/FortunesController.swift +++ b/frameworks/Swift/hummingbird/src-postgres/Sources/server/Controllers/FortunesController.swift @@ -15,18 +15,7 @@ class FortunesController { init() { self.template = try! HBMustacheTemplate(string: """ - <!DOCTYPE html> - <html> - <head><title>Fortunes</title></head> - <body> - <table> - <tr><th>id</th><th>message</th></tr> - {{#.}} - <tr><td>{{id}}</td><td>{{message}}</td></tr> - {{/.}} - </table> - </body> - </html> + <!DOCTYPE html><html><head><title>Fortunes</title></head><body><table><tr><th>id</th><th>message</th></tr>{{#.}}<tr><td>{{id}}</td><td>{{message}}</td></tr>{{/.}}</table></body></html>
""") } diff --git a/frameworks/Swift/hummingbird/src-postgres/Sources/server/main.swift b/frameworks/Swift/hummingbird/src-postgres/Sources/server/main.swift index 22b1a64eba1..932ba72c45d 100644 --- a/frameworks/Swift/hummingbird/src-postgres/Sources/server/main.swift +++ b/frameworks/Swift/hummingbird/src-postgres/Sources/server/main.swift @@ -8,7 +8,7 @@ extension Int { } } -func runApp() { +func runApp() throws { let env = HBEnvironment() let serverHostName = env.get("SERVER_HOSTNAME") ?? "127.0.0.1" let serverPort = env.get("SERVER_PORT", as: Int.self) ?? 8080 @@ -20,12 +20,12 @@ func runApp() { let app = HBApplication(configuration: configuration) app.encoder = JSONEncoder() app.initConnectionPool() - + WorldController().add(to: app.router) FortunesController().add(to: app.router) - app.start() + try app.start() app.wait() } -runApp() +try runApp() diff --git a/frameworks/Swift/hummingbird/src/Package.swift b/frameworks/Swift/hummingbird/src/Package.swift index a57c9cc8460..7763ba1d564 100644 --- a/frameworks/Swift/hummingbird/src/Package.swift +++ b/frameworks/Swift/hummingbird/src/Package.swift @@ -9,7 +9,7 @@ let package = Package( .executable(name: "server", targets: ["server"]) ], dependencies: [ - .package(url: "https://github.com/hummingbird-project/hummingbird.git", .upToNextMinor(from: "0.7.0")), + .package(url: "https://github.com/hummingbird-project/hummingbird.git", .upToNextMinor(from: "0.11.0")), ], targets: [ .target(name: "server", diff --git a/frameworks/Swift/hummingbird/src/Sources/server/main.swift b/frameworks/Swift/hummingbird/src/Sources/server/main.swift index e462ba944f0..fe437174c06 100644 --- a/frameworks/Swift/hummingbird/src/Sources/server/main.swift +++ b/frameworks/Swift/hummingbird/src/Sources/server/main.swift @@ -5,7 +5,7 @@ struct Object: HBResponseEncodable { let message: String } -func runApp() { +func runApp() throws { let env = HBEnvironment() let serverHostName = env.get("SERVER_HOSTNAME") ?? "127.0.0.1" let serverPort = env.get("SERVER_PORT", as: Int.self) ?? 8080 @@ -17,7 +17,7 @@ func runApp() { ) let app = HBApplication(configuration: configuration) app.encoder = JSONEncoder() - + app.router.get("plaintext") { req in "Hello, world!" } @@ -26,8 +26,8 @@ func runApp() { Object(message: "Hello, world!") } - app.start() + try app.start() app.wait() } -runApp() +try runApp() diff --git a/frameworks/Swift/swift-nio/swift-nio.dockerfile b/frameworks/Swift/swift-nio/swift-nio.dockerfile index a3921e118fb..c372a6f40b2 100644 --- a/frameworks/Swift/swift-nio/swift-nio.dockerfile +++ b/frameworks/Swift/swift-nio/swift-nio.dockerfile @@ -10,7 +10,8 @@ COPY ./app . # Compile with optimizations RUN swift build \ --enable-test-discovery \ - -c release + -c release \ + -Xswiftc -enforce-exclusivity=unchecked # ================================ # Run image diff --git a/frameworks/Swift/vapor/vapor-fluent.dockerfile b/frameworks/Swift/vapor/vapor-fluent.dockerfile index d3205952eea..dc0e231af9c 100644 --- a/frameworks/Swift/vapor/vapor-fluent.dockerfile +++ b/frameworks/Swift/vapor/vapor-fluent.dockerfile @@ -1,7 +1,7 @@ # ================================ # Build image # ================================ -FROM vapor/swift:5.2 as build +FROM swift:5.3 as build WORKDIR /build # Copy entire repo into container @@ -10,12 +10,13 @@ COPY ./vapor-fluent . 
# Compile with optimizations RUN swift build \ --enable-test-discovery \ - -c release + -c release \ + -Xswiftc -enforce-exclusivity=unchecked # ================================ # Run image # ================================ -FROM vapor/ubuntu:18.04 +FROM swift:5.3-slim WORKDIR /run # Copy build artifacts diff --git a/frameworks/Swift/vapor/vapor-postgres.dockerfile b/frameworks/Swift/vapor/vapor-postgres.dockerfile index 3debdc28e15..150cfe02370 100644 --- a/frameworks/Swift/vapor/vapor-postgres.dockerfile +++ b/frameworks/Swift/vapor/vapor-postgres.dockerfile @@ -1,7 +1,7 @@ # ================================ # Build image # ================================ -FROM vapor/swift:5.2 as build +FROM swift:5.3 as build WORKDIR /build # Copy entire repo into container @@ -10,12 +10,13 @@ COPY ./vapor-postgres . # Compile with optimizations RUN swift build \ --enable-test-discovery \ - -c release + -c release \ + -Xswiftc -enforce-exclusivity=unchecked # ================================ # Run image # ================================ -FROM vapor/ubuntu:18.04 +FROM swift:5.3-slim WORKDIR /run # Copy build artifacts diff --git a/frameworks/Swift/vapor/vapor-sql-kit.dockerfile b/frameworks/Swift/vapor/vapor-sql-kit.dockerfile index f641c480ba7..505dbecd8ab 100644 --- a/frameworks/Swift/vapor/vapor-sql-kit.dockerfile +++ b/frameworks/Swift/vapor/vapor-sql-kit.dockerfile @@ -1,7 +1,7 @@ # ================================ # Build image # ================================ -FROM vapor/swift:5.2 as build +FROM swift:5.3 as build WORKDIR /build # Copy entire repo into container @@ -10,12 +10,13 @@ COPY ./vapor-sql-kit . # Compile with optimizations RUN swift build \ --enable-test-discovery \ - -c release + -c release \ + -Xswiftc -enforce-exclusivity=unchecked # ================================ # Run image # ================================ -FROM vapor/ubuntu:18.04 +FROM swift:5.3-slim WORKDIR /run # Copy build artifacts diff --git a/frameworks/Swift/vapor/vapor.dockerfile b/frameworks/Swift/vapor/vapor.dockerfile index 9b4b7171484..3c608af111e 100644 --- a/frameworks/Swift/vapor/vapor.dockerfile +++ b/frameworks/Swift/vapor/vapor.dockerfile @@ -1,7 +1,7 @@ # ================================ # Build image # ================================ -FROM vapor/swift:5.2 as build +FROM swift:5.3 as build WORKDIR /build # Copy entire repo into container @@ -10,12 +10,13 @@ COPY ./vapor-default . 
# Compile with optimizations RUN swift build \ --enable-test-discovery \ - -c release + -c release \ + -Xswiftc -enforce-exclusivity=unchecked # ================================ # Run image # ================================ -FROM vapor/ubuntu:18.04 +FROM swift:5.3-slim WORKDIR /run # Copy build artifacts diff --git a/frameworks/TypeScript/nest/nestjs-fastify-mongo.dockerfile b/frameworks/TypeScript/nest/nestjs-fastify-mongo.dockerfile index 31b81790e91..6adf5b36d9d 100644 --- a/frameworks/TypeScript/nest/nestjs-fastify-mongo.dockerfile +++ b/frameworks/TypeScript/nest/nestjs-fastify-mongo.dockerfile @@ -1,4 +1,4 @@ -FROM node:14.16.0-slim +FROM node:14.17.0-slim COPY ./ ./ diff --git a/frameworks/TypeScript/nest/nestjs-fastify-mysql.dockerfile b/frameworks/TypeScript/nest/nestjs-fastify-mysql.dockerfile index 3078c401b53..201d2cd0240 100644 --- a/frameworks/TypeScript/nest/nestjs-fastify-mysql.dockerfile +++ b/frameworks/TypeScript/nest/nestjs-fastify-mysql.dockerfile @@ -1,4 +1,4 @@ -FROM node:14.16.0-slim +FROM node:14.17.0-slim COPY ./ ./ diff --git a/frameworks/TypeScript/nest/nestjs-fastify.dockerfile b/frameworks/TypeScript/nest/nestjs-fastify.dockerfile index 8ba171cc04d..07f93c3efa8 100644 --- a/frameworks/TypeScript/nest/nestjs-fastify.dockerfile +++ b/frameworks/TypeScript/nest/nestjs-fastify.dockerfile @@ -1,4 +1,4 @@ -FROM node:14.16.0-slim +FROM node:14.17.0-slim COPY ./ ./ diff --git a/frameworks/TypeScript/nest/nestjs-mongo.dockerfile b/frameworks/TypeScript/nest/nestjs-mongo.dockerfile index a84ec2a46d8..91e1d41a535 100644 --- a/frameworks/TypeScript/nest/nestjs-mongo.dockerfile +++ b/frameworks/TypeScript/nest/nestjs-mongo.dockerfile @@ -1,4 +1,4 @@ -FROM node:14.16.0-slim +FROM node:14.17.0-slim COPY ./ ./ diff --git a/frameworks/TypeScript/nest/nestjs-mysql.dockerfile b/frameworks/TypeScript/nest/nestjs-mysql.dockerfile index 1f65ce50dc2..4c0c1dabac5 100644 --- a/frameworks/TypeScript/nest/nestjs-mysql.dockerfile +++ b/frameworks/TypeScript/nest/nestjs-mysql.dockerfile @@ -1,4 +1,4 @@ -FROM node:14.16.0-slim +FROM node:14.17.0-slim COPY ./ ./ diff --git a/frameworks/TypeScript/nest/nestjs.dockerfile b/frameworks/TypeScript/nest/nestjs.dockerfile index 23a52ba3bf9..9ba6569e804 100644 --- a/frameworks/TypeScript/nest/nestjs.dockerfile +++ b/frameworks/TypeScript/nest/nestjs.dockerfile @@ -1,4 +1,4 @@ -FROM node:14.16.0-slim +FROM node:14.17.0-slim COPY ./ ./ diff --git a/toolset/utils/docker_helper.py b/toolset/utils/docker_helper.py index abfa6178462..ed3f1595603 100644 --- a/toolset/utils/docker_helper.py +++ b/toolset/utils/docker_helper.py @@ -184,7 +184,11 @@ def watch_container(docker_container, docker_file): } name = None - sysctl = {'net.core.somaxconn': 65535} + if self.benchmarker.config.network_mode is None: + sysctl = {'net.core.somaxconn': 65535} + else: + # Do not pass `net.*` kernel params when using host network mode + sysctl = None ulimit = [{ 'name': 'nofile', @@ -319,10 +323,16 @@ def start_database(self, database): image_name = "techempower/%s:latest" % database log_prefix = image_name + ": " - sysctl = { - 'net.core.somaxconn': 65535, - 'kernel.sem': "250 32000 256 512" - } + if self.benchmarker.config.network_mode is None: + sysctl = { + 'net.core.somaxconn': 65535, + 'kernel.sem': "250 32000 256 512" + } + else: + # Do not pass `net.*` kernel params when using host network mode + sysctl = { + 'kernel.sem': "250 32000 256 512" + } ulimit = [{'name': 'nofile', 'hard': 65535, 'soft': 65535}] @@ -401,7 +411,11 @@ def watch_container(container): for line 
in container.logs(stream=True): log(line, file=benchmark_file) - sysctl = {'net.core.somaxconn': 65535} + if self.benchmarker.config.network_mode is None: + sysctl = {'net.core.somaxconn': 65535} + else: + # Do not pass `net.*` kernel params when using host network mode + sysctl = None ulimit = [{'name': 'nofile', 'hard': 65535, 'soft': 65535}]
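
Note on the docker_helper.py hunks above: when the toolset is configured to run containers with host networking, the container shares the host's network namespace, and the Docker daemon rejects per-container net.* sysctls; namespaced IPC settings such as kernel.sem can still be applied, which is why the database container keeps that value. The following is a minimal sketch of that rule using the docker-py client, not the toolset's actual helper; the function name and arguments are hypothetical, and only the sysctl/ulimit values and the network_mode check are taken from the diff.

    import docker

    def run_container(image, network_mode=None):
        """Minimal sketch: start a benchmark container, skipping net.* sysctls
        when host networking is requested (mirrors the docker_helper.py logic above)."""
        client = docker.from_env()

        if network_mode is None:
            # Default (bridge) networking: the container owns its network
            # namespace, so net.* sysctls can be set per container.
            sysctls = {'net.core.somaxconn': 65535}
        else:
            # Host networking: do not pass net.* kernel params; Docker refuses
            # them because there is no per-container network namespace.
            sysctls = None

        ulimits = [docker.types.Ulimit(name='nofile', soft=65535, hard=65535)]

        return client.containers.run(
            image,
            detach=True,
            network_mode=network_mode,  # e.g. "host" or None
            sysctls=sysctls,
            ulimits=ulimits,
        )

    # Example (hypothetical image name):
    # run_container("techempower/postgres:latest", network_mode="host")

The same reasoning explains why the database path keeps kernel.sem in both branches while dropping only net.core.somaxconn under host networking.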