diff --git a/.gitignore b/.gitignore index a0d761fb..3561c1bd 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,8 @@ .idea/** *.iml +content/Apache/Wayang/**/libs/** + # Created by .ignore support plugin (hsz.mobi) ### Maven template target/ @@ -135,3 +137,10 @@ fabric.properties .history .vscode .classpath +content/Apache/Wayang/.DS_Store +content/Apache/Wayang/quickstart-guide/.DS_Store +content/Apache/Wayang/quickstart-guide/libs/.DS_Store + +/tools/repo-doc-analysis/docs-main/developer.gov.sg/ +/tools/repo-doc-analysis/docs-main/incubator-wayang/ +/tools/repo-doc-analysis/docs-main/kafka diff --git a/content/Apache/Wayang/pom.xml b/content/Apache/Wayang/pom.xml new file mode 100644 index 00000000..81152f45 --- /dev/null +++ b/content/Apache/Wayang/pom.xml @@ -0,0 +1,40 @@ + + + + + 4.0.0 + + + org.apache.training.content + training-content-apache + 0.4.0-SNAPSHOT + + + training-content-apache-wayang + pom + + Training: Content: Apache Wayang + + + quickstart-guide + + + diff --git a/content/Apache/Wayang/quickstart-guide/README.md b/content/Apache/Wayang/quickstart-guide/README.md new file mode 100644 index 00000000..1845bc1f --- /dev/null +++ b/content/Apache/Wayang/quickstart-guide/README.md @@ -0,0 +1,66 @@ + + +# Presentation with Reveal.JS and AsciiDoctor + +Remarks: +- Currently it seems as if the system can't detect the 'docinfo' files, so we have to replace the `document.html.slim` file from `asciidoctor-reveal.js` with an updated one, that adds some additional js and css references. This template is located in `libs/docinfo-hack`. +- In order to use the preview of the IntelliJ asciidoctor plugin, you need to set an attribute in the plugin settings: `imagesdir` = `../resources/images` +- Even if it is possible to run the presentation directly from the `generated-slides` directory, some JavaScript extensions don't work in this case. Therefore it is required to run the presentation from a local webserver. +- In order to generate the diagrams, GraphVIS needs to be installed locally. Get it from: http://www.graphviz.org/ +- The template is adjusted to use the codecentric font `Panton`, so be sure to have that installed on your presentation machine. +- Any css adjustments can go to `src/main/theme/cc.css` as this is automatically embedded into the themes directory. + +## Building the presentation + +By running the following command, you can generate the presentation: + + mvn clean package + +## Running the presentation + +In order to start a local web server serving the presentation, execute the following command: + + mvn jetty:run-exploded + +As soon as that's done, just point your browser to: + + http://localhost:8080/ + +## Generating PDF versions + +In order to generate a PDF version of the presentation just add `?print-pdf` to the url. (Keep in mind, that you have to add it before any `#blahblah`) + +The following link should do the trick: + + http://localhost:8080/?print-pdf + +As soon as that's loaded, just use the normal `print` functionality of the browser and `print as PDF`. + +## Installing third party software: + +### Mermaid + + npm install mermaid.cli + +This will install mermaid under `node_modules/.bin/mmdc`. 
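+
+For example, a diagram could then be rendered with (assuming a diagram source file `diagram.mmd` in the current directory):
+
+    ./node_modules/.bin/mmdc -i diagram.mmd -o diagram.svg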
+ +### PhantomJS + +https://bitbucket.org/ariya/phantomjs/downloads/phantomjs-2.1.1-macosx.zip diff --git a/content/Apache/Wayang/quickstart-guide/install-deps-centos.sh b/content/Apache/Wayang/quickstart-guide/install-deps-centos.sh new file mode 100644 index 00000000..31f03235 --- /dev/null +++ b/content/Apache/Wayang/quickstart-guide/install-deps-centos.sh @@ -0,0 +1,61 @@ +#!/usr/bin/env bash +# ---------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# ---------------------------------------------------------------------------- + + +############# install necessary packages +yum install -y git graphviz maven nodejs wget bzip2 python36 python36-pip pygobject3 cargo + +############# +python3 -m pip install --upgrade pip setuptools seqdiag blockdiag actdiag nwdiag convert syntrax racks opc-diag +npm install vega pango + +############# install stack +wget -qO- https://get.haskellstack.org/ | sh + +############# install PhantomJS +wget https://bitbucket.org/ariya/phantomjs/downloads/phantomjs-2.1.1-linux-x86_64.tar.bz2 +bunzip2 phantomjs-2.1.1-linux-x86_64.tar.bz2 +tar -xvf phantomjs-2.1.1-linux-x86_64.tar + +############# install ERD +cd libs +mkdir third-party +cd third-party +git clone https://github.com/BurntSushi/erd.git +cd erd +stack install +read -p "Add stack ($HOME/.local/bin) to PATH ($PATH) ? (y/n)" -n 1 -r YES_NO +if [ $YES_NO = "y" ]; then + echo 'export PATH=$PATH:$HOME/.local/bin' >> ~/.bash_profile + . ~/.bash_profile +fi + +############# install SVGBob +cargo install svgbob_cli +read -p "Add cargo ($HOME/.cargo/bin) to PATH ($PATH) ? (y/n)" -n 1 -r YES_NO +if [ $YES_NO = "y" ]; then + echo 'export PATH=$PATH:$HOME/.cargo/bin' >> ~/.bash_profile + . ~/.bash_profile +fi + +############# +cd ../../.. +mvn jetty:run-exploded + diff --git a/content/Apache/Wayang/quickstart-guide/install-deps-mac.sh b/content/Apache/Wayang/quickstart-guide/install-deps-mac.sh new file mode 100755 index 00000000..6b33d8f4 --- /dev/null +++ b/content/Apache/Wayang/quickstart-guide/install-deps-mac.sh @@ -0,0 +1,56 @@ +#!/usr/bin/env bash +# ---------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# ---------------------------------------------------------------------------- + +cd libs +mkdir third-party +cd third-party + +# Dependency: Stack +curl -sSL https://get.haskellstack.org/ | sh + +# Install ERD +git clone https://github.com/BurntSushi/erd.git +cd erd +#stack init +stack build --system-ghc +port install erd +cd .. + +# Install Mermaid +npm install mermaid.cli + +# Install PhantomJS +wget https://bitbucket.org/ariya/phantomjs/downloads/phantomjs-2.1.1-macosx.zip +unzip phantomjs-2.1.1-macosx.zip + +# SVGBob +cargo install svgbob_cli --path ./svgbob + +# Syntrax +pip install --upgrade syntrax +pip install pycairo +brew install pygobject3 + +# Vega +npm install vega + +# ImageMagic +wget https://imagemagick.org/download/binaries/ImageMagick-x86_64-apple-darwin17.7.0.tar.gz +tar xvzf ImageMagick-x86_64-apple-darwin17.7.0.tar.gz diff --git a/content/Apache/Wayang/quickstart-guide/pom.xml b/content/Apache/Wayang/quickstart-guide/pom.xml new file mode 100644 index 00000000..93547267 --- /dev/null +++ b/content/Apache/Wayang/quickstart-guide/pom.xml @@ -0,0 +1,40 @@ + + + 4.0.0 + + + org.apache.training + content-parent-pom + 1.0.0 + + + + + + quickstart-guide + + apache-wayang + 1.0-SNAPSHOT + war + + Training: Tools: Slide Template: Asciidoc to reveal.js Maven apache-wayang + An apache-wayang project that demonstrates how to integrate convert Asciidoc to a reveal.js presentation with the Asciidoctor Maven plugin. + + diff --git a/content/Apache/Wayang/quickstart-guide/src/.DS_Store b/content/Apache/Wayang/quickstart-guide/src/.DS_Store new file mode 100644 index 00000000..7b0d3672 Binary files /dev/null and b/content/Apache/Wayang/quickstart-guide/src/.DS_Store differ diff --git a/content/Apache/Wayang/quickstart-guide/src/main/.DS_Store b/content/Apache/Wayang/quickstart-guide/src/main/.DS_Store new file mode 100644 index 00000000..daeff97d Binary files /dev/null and b/content/Apache/Wayang/quickstart-guide/src/main/.DS_Store differ diff --git a/content/Apache/Wayang/quickstart-guide/src/main/asciidoc/.DS_Store b/content/Apache/Wayang/quickstart-guide/src/main/asciidoc/.DS_Store new file mode 100644 index 00000000..2baf1363 Binary files /dev/null and b/content/Apache/Wayang/quickstart-guide/src/main/asciidoc/.DS_Store differ diff --git a/content/Apache/Wayang/quickstart-guide/src/main/asciidoc/_settings.adoc b/content/Apache/Wayang/quickstart-guide/src/main/asciidoc/_settings.adoc new file mode 100644 index 00000000..261a3969 --- /dev/null +++ b/content/Apache/Wayang/quickstart-guide/src/main/asciidoc/_settings.adoc @@ -0,0 +1,36 @@ +//// + + Licensed to the Apache Software Foundation (ASF) under one or more + contributor license agreements. See the NOTICE file distributed with + this work for additional information regarding copyright ownership. + The ASF licenses this file to You under the Apache License, Version 2.0 + (the "License"); you may not use this file except in compliance with + the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +//// + +:goto: +:menu: +:navigation: +:status: +:arrows: +:revealjs_theme: cc_black +:revealjs_progress: true +:revealjs_slidenumber: true +:pdf-page-size: A4 +:revealjs_center: false +:revealjs_history: true +:icons: font +:imagesdir: images +:sourcedir: ../java +:title-slide-background-image: background-dark-orig.jpg +:idprefix: +:idseparator: - diff --git a/content/Apache/Wayang/quickstart-guide/src/main/asciidoc/diagrams/mermaid-flowchart-test.mmd b/content/Apache/Wayang/quickstart-guide/src/main/asciidoc/diagrams/mermaid-flowchart-test.mmd new file mode 100644 index 00000000..d5bf6cb3 --- /dev/null +++ b/content/Apache/Wayang/quickstart-guide/src/main/asciidoc/diagrams/mermaid-flowchart-test.mmd @@ -0,0 +1,5 @@ +graph TD; + A-->B; + A-->C; + B-->D; + C-->D; diff --git a/content/Apache/Wayang/quickstart-guide/src/main/asciidoc/index.adoc b/content/Apache/Wayang/quickstart-guide/src/main/asciidoc/index.adoc new file mode 100644 index 00000000..9b82a50e --- /dev/null +++ b/content/Apache/Wayang/quickstart-guide/src/main/asciidoc/index.adoc @@ -0,0 +1,621 @@ +//// + + Licensed to the Apache Software Foundation (ASF) under one or more + contributor license agreements. See the NOTICE file distributed with + this work for additional information regarding copyright ownership. + The ASF licenses this file to You under the Apache License, Version 2.0 + (the "License"); you may not use this file except in compliance with + the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +//// + +include::_settings.adoc[] +:presenter_name: Some Cool Person +:presenter_company: Apache Software Foundation + +== Example Presentation +Doc Writer +{docdate} +:revnumber: {project-version} +:example-caption!: + +== Introduction + +[%step] +* This is a presentation for an example project. +* This project does something. +* We just haven't decided what that is yet. + +== Speaker Notes + +Press the `*s*` key to access speaker notes. + +[NOTE.speaker] +-- +You've unlocked the super secret notes area. +Neat huh? +-- + +== Blank screen + +You can press `*b*` or `*.*` to blank your current screen. + +Hit it again to bring it back. + +== Overview + +Press the `*esc*` key to get an overview of your slides. + +== Attributes + +Press the down arrow key to see the next sub-slide. 
+ +=== Attributes Part 1 +.Built-in +asciidoctor-version:: {asciidoctor-version} +safe-mode-name:: {safe-mode-name} +docdir:: {docdir} +docfile:: {docfile} +imagesdir:: {imagesdir} + +=== Attributes Part 2 +.Custom +project-version:: {project-version} +sourcedir:: {sourcedir} + +== Including documents from subdir + +.include::subdir/_b.adoc[] + +include::subdir/_b.adoc[] + +== Images + +[.thumb] +image::logo-apache.png[scaledwidth=75%] + +== Videos + +Play videos from Youtube, or locally + +video::BKorP55Aqvg[youtube, width=640, height=480] + +== Lists + +Asciidoctor allows a wide variety of list option. + +For details see the documentation starting at: http://asciidoctor.org/docs/user-manual/#unordered-lists + +Press the down arrow key to see the some examples. + +=== Unordered List + +* Protons +* Electrons +* Neutrons + +=== Ordered List + +. Protons +. Electrons +. Neutrons + +=== Nested List + +Nest lists, by duplicating the list-char + +* Protons +** Electrons +* Neutrons + +== Tables + +[cols="2*", options="header"] +|=== +| Header A | Header B +| Cell A1 | Cell B1 +| Cell A2 | Cell B2 +| Cell A3 | Cell B3 +|=== + +== Source Code + +[source,java] +.Java code from project +---- +include::{sourcedir}/apache-wayang/StringUtils.java[tags=contains,indent=0] +---- + +== Two columns + +[.west] +* Blah +* Blah +* Blah +* Blah +* Blah + +[.east] +* Blah +* Blah +* Blah +* Blah +* Blah + +== Fontawesome Icons + +* icon:check[role=green] +* icon:wrench[] +* icon:beer[role=orange] + +[NOTE.speaker] +-- +* Find icons here: https://fontawesome.com/v4.7.0/icons/ +* Color icons by defining css styles in apache.css and referencing them with "role" +-- + +== Diagrams + +Press the down arrow key to see the different types of diagrams + +=== Ditaa Diagram + +[ditaa,asciidoctor-diagram-process] +.... + +-------------+ + | Asciidoctor |-------+ + | diagram | | + +-------------+ | PNG out + ^ | + | ditaa in | + | v + +--------+ +--------+----+ /---------------\ + | |---+ Asciidoctor +--->| | + | Text | +-------------+ | Beautiful | + |Document| | !magic! | | Output | + | {d}| | | | | + +---+----+ +-------------+ \---------------/ + : ^ + | Lots of work | + +-----------------------------------+ +.... + +=== PlantUML Diagram + +[plantuml,auth-protocol] +.... +Alice -> Bob: Authentication Request +Bob --> Alice: Authentication Response + +Alice -> Bob: Another authentication Request +Alice <-- Bob: another authentication Response +.... + +=== GraphVIZ Diagram + +[graphviz, dot-example, svg] +---- +digraph g { + a -> b + b -> c + c -> d + d -> a +} +---- + +=== Seqdiag Diagram + +[seqdiag,s7-communication,svg] +.... +{ + A -> B -> C; + B -> D; +} +.... + +=== Blockdiag Diagram + +[blockdiag,blockdiag-test] +.... +{ + A -> B -> C -> D; + A -> E -> F -> G; +} +.... + +=== Actdiag Diagram + +[actdiag,actdiag-test] +.... +{ + write -> convert -> image + + lane user { + label = "User" + write [label = "Writing reST"]; + image [label = "Get diagram IMAGE"]; + } + lane actdiag { + convert [label = "Convert reST to Image"]; + } +} +.... + +=== Nwdiag Diagram + +[nwdiag,nwdiag-test] +.... +{ + network dmz { + address = "210.x.x.x/24" + + web01 [address = "210.x.x.1"]; + web02 [address = "210.x.x.2"]; + } + network internal { + address = "172.x.x.x/24"; + + web01 [address = "172.x.x.1"]; + web02 [address = "172.x.x.2"]; + db01; + db02; + } +} +.... + +=== Rackdiag Diagram + +[rackdiag,rackdiag-test] +.... 
+{ + // Change order of rack-number as ascending + ascending; + + // define height of rack + 12U; + + // define description of rack + description = "Tokyo/1234 East"; + + // define rack units + 1: UPS [2U]; // define height of unit + 3: DB Server [5kg] // define weight of unit + 4: Web Server [0.5A] // define ampere of unit + 5: Web Server + 6: Web Server + 7: Load Balancer + 8: L3 Switch +} +.... + +=== Packetdiag Diagram + +[packetdiag,packetdiag-test,svg] +.... +{ + colwidth = 32 + node_height = 72 + + 0-15: Source Port + 16-31: Destination Port + 32-63: Sequence Number + 64-95: Acknowledgment Number + 96-99: Data Offset + 100-105: Reserved + 106: URG [rotate = 270] + 107: ACK [rotate = 270] + 108: PSH [rotate = 270] + 109: RST [rotate = 270] + 110: SYN [rotate = 270] + 111: FIN [rotate = 270] + 112-127: Window + 128-143: Checksum + 144-159: Urgent Pointer + 160-191: (Options and Padding) + 192-223: data [colheight = 3] +} +.... + +=== Meme Diagram + +meme::../resources/images/toddy.jpg[May the Source,Be With You] + +=== Entity Relation Diagram + +[erd,erd-test,svg] +.... +[Person] +*name +height +weight ++birth_location_id + +[Location] +*id +city +state +country + +# Cardinality Syntax +# 0 or 1 ? +# exactly 1 1 +# 0 or more * +# 1 or more + +Person *--1 Location +.... + +[NOTE.speaker] +-- +-- + +//// +=== Mermaid: Flowchart + +mermaid::diagrams/mermaid-flowchart-test.mmd[png] + +[NOTE.speaker] +-- +-- + +=== Mermaid: Sequence Diagram + +[mermaid,"mermaid-sequence-diagram-test",svg] +.... +sequenceDiagram + participant Alice + participant Bob + Alice->John: Hello John, how are you? + loop Healthcheck + John->John: Fight against hypochondria + end + Note right of John: Rational thoughts
prevail... + John-->Alice: Great! + John->Bob: How about you? + Bob-->John: Jolly good! +.... + +=== Mermaid: Gant Diagram + +[mermaid,mermaid-gant-diagram-test,svg] +.... +gantt + dateFormat YYYY-MM-DD + title Adding GANTT diagram functionality to mermaid + section A section + Completed task :done, des1, 2014-01-06,2014-01-08 + Active task :active, des2, 2014-01-09, 3d + Future task : des3, after des2, 5d + Future task2 : des4, after des3, 5d + section Critical tasks + Completed task in the critical line :crit, done, 2014-01-06,24h + Implement parser and jison :crit, done, after des1, 2d + Create tests for parser :crit, active, 3d + Future task in critical line :crit, 5d + Create tests for renderer :2d + Add to mermaid :1d +.... +//// + +=== SVGBob: Mindmap + +[svgbob,svgbob-mindmap-test,svg] +.... + .--> Alpha + / + .----> Initial Release + Planning *-------. / \ + \ / '---> Patch 1 + Initial research \ / \ + * \ / '--> Patch 2 + \ \ .---------> Beta + \ \ / + \ o o _______ + \ .---. o--.___ / \ + '------> ( ) '----*---> . Release . + `---' o \_______/ + o o o \ + / \ \ \ + .--' \ \ \ + / \ \ '----+-> Push backs + . \ \ \ + /| \ \ '----> Setbacks + / . \ \ + V /| \ '-----> Reception + Team / . \ + v /| \ + Worklaod / . '-->> Career change + V / + PTO / + V + Bug +.... + +=== SVGBob: Circuit Diagram + +[svgbob,svgbob-circuit-diagram-test,svg] +.... + +10-15V ___0,047R + *------o------o-|___|-o--o---------o----o-------. + | | | | | | | + --- | | .+. | | | + 470uF | + | | | | | | .+. + *------o '--. | '+' .+. | '-' + | |6 |7 |8 1k | | | | + GND .-+----+--+--. | | | | + | | '+' | | + | |1 | |/ BC | + | +------o--+ 547 | + | | | |`> | + | | ,+. | | + | | 220R| | o----||-+ IRF9Z34 + | | | | | |+-> + | MC34063 | `+' | ||-+ + | | | | | BYV29 -12V6 + | | '----' o--|<-o----o--X OUT + | |2 | | | + | |--o C| | | + | | GND 30uH C| | --- 470 + | |3 1nF C| | + | |-------||--. | | | + + '-----+----+-' | GND | GND + 5| 4| | | + | '-------------o-------------o + | ___ | + '------/\/\/------------o--|___|-' + | 1k0 + .+. + | | 5k6 + 3k3 + | | in Serie + '+' + | + GND +.... + +=== Syntrax + +[syntrax,syntrax-test,svg] +.... +indentstack(10, + line(opt('-'), choice('0', line('1-9', loop(None, '0-9'))), + opt('.', loop('0-9', None))), + + line(opt(choice('e', 'E'), choice(None, '+', '-'), loop('0-9', None))) +) +.... + +//// +=== Umlet + +[umlet,umlet-test,svg] +.... 
+com.umlet.element.base.Relation73916232264lt=<- +when(spidersensor="rotate") +/block spider161;244;161;34;71;34;71;74com.umlet.element.custom.FinalState8902602020transparentSelection=falsecom.umlet.element.base.Relation750170160137lt=<- +after (10s) +/ block spider140;100;66;100;66;20com.umlet.element.custom.State34042010040waittransparentSelection=falsecom.umlet.element.custom.HistoryState2304402020transparentSelection=falsecom.umlet.element.base.Relation23041613054lt=<- +restart20;34;110;34com.umlet.element.base.Relation2703969054lt=<- +pause70;34;20;34com.umlet.element.custom.FinalState904002020transparentSelection=falsecom.umlet.element.base.Relation46256114164lt=<- +after (10s) +/timeout54;144;54;34;94;34com.umlet.element.base.Relation230110190170lt=<- +timeout20;150;110;150;110;20;170;20com.umlet.element.custom.State70090180100accept +boarding pass +-- +entry/ release card +do/release spidertransparentSelection=truecom.umlet.element.base.Relation540140205100lt=<- +[passenger booked]160;20;120;80;20;80com.umlet.element.base.Relation450210239190lt=<- +[passenger not booked]219;170;99;170;99;20com.umlet.element.custom.State67035012050reject +boarding passtransparentSelection=falsecom.umlet.element.base.Relation480130142100lt=<- +result of search71;80;71;20com.umlet.element.base.Relation2707015040lt=<-130;20;20;20com.umlet.element.custom.ThreeWayRelation5402102020transparentSelection=falsecom.umlet.element.custom.State14060150420read boarding pass +--transparentSelection=truecom.umlet.element.custom.State4006018090check passenger +-- +entry/start search +do/blink lamptransparentSelection=truecom.umlet.element.custom.FinalState1704102020transparentSelection=falsecom.umlet.element.custom.State15024010040read +passenger IDtransparentSelection=falsecom.umlet.element.custom.State15033010040identify +passengertransparentSelection=falsecom.umlet.element.base.Relation1602604090lt=<-20;70;20;20com.umlet.element.base.Relation1601004070lt=<-20;50;20;20com.umlet.element.base.Relation1603504080lt=<-20;60;20;20com.umlet.element.base.Relation1401707890lt=<- +[valid]39;70;39;20com.umlet.element.custom.State15015010040check +validitytransparentSelection=falsecom.umlet.element.custom.InitialState1701002020transparentSelection=false +.... +//// + +=== Vega + +[vega,vega-test,svg] +.... 
+{ + "$schema": "https://vega.github.io/schema/vega/v5.json", + "width": 1000, + "height": 500, + "padding": 5, + + "data": [ + { + "name": "iot-riskks1", + "values": [ + {"cheese": "Security", "amount": 43}, + {"cheese": "IT/OT Integration", "amount": 30}, + {"cheese": "Unclear ROI", "amount": 28}, + {"cheese": "Technical Expertise", "amount": 26}, + {"cheese": "Interoperability", "amount": 26}, + {"cheese": "Data Portability", "amount": 24}, + {"cheese": "Vendor Risk", "amount": 22}, + {"cheese": "Transition Risk", "amount": 22}, + {"cheese": "Legal/regulatory issues", "amount": 22}, + {"cheese": "Network constraints", "amount": 21}, + {"cheese": "Vendor lock-in", "amount": 18} + ] + } + ], + + "scales": [ + { + "name": "xscale", + "type": "band", + "domain": {"data": "iot-riskks1", "field": "cheese"}, + "range": "width", + "padding": 0.05, + "round": true + }, + { + "name": "yscale", + "domain": {"data": "iot-riskks1", "field": "amount"}, + "nice": true, + "range": "height" + } + ], + + "marks": [ + { + "type": "rect", + "from": {"data":"iot-riskks1"}, + "encode": { + "enter": { + "x": {"scale": "xscale", "field": "cheese"}, + "width": {"scale": "xscale", "band": 1}, + "y": {"scale": "yscale", "field": "amount"}, + "y2": {"scale": "yscale", "value": 0} + }, + "update": { + "fill": {"value": "steelblue"} + } + } + } + ] +} +.... + +== Embedded HTML/JavaScript + +++++ +
+ +++++ diff --git a/content/Apache/Wayang/quickstart-guide/src/main/asciidoc/subdir/_b.adoc b/content/Apache/Wayang/quickstart-guide/src/main/asciidoc/subdir/_b.adoc new file mode 100644 index 00000000..cb5008cd --- /dev/null +++ b/content/Apache/Wayang/quickstart-guide/src/main/asciidoc/subdir/_b.adoc @@ -0,0 +1,26 @@ +//// + + Licensed to the Apache Software Foundation (ASF) under one or more + contributor license agreements. See the NOTICE file distributed with + this work for additional information regarding copyright ownership. + The ASF licenses this file to You under the Apache License, Version 2.0 + (the "License"); you may not use this file except in compliance with + the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +//// + +content from _src/docs/asciidoc/subdir/_b.adoc_. + +.include::_c.adoc[] +[example] +-- +include::_c.adoc[] +-- diff --git a/content/Apache/Wayang/quickstart-guide/src/main/asciidoc/subdir/_c.adoc b/content/Apache/Wayang/quickstart-guide/src/main/asciidoc/subdir/_c.adoc new file mode 100644 index 00000000..447ae420 --- /dev/null +++ b/content/Apache/Wayang/quickstart-guide/src/main/asciidoc/subdir/_c.adoc @@ -0,0 +1,20 @@ +//// + + Licensed to the Apache Software Foundation (ASF) under one or more + contributor license agreements. See the NOTICE file distributed with + this work for additional information regarding copyright ownership. + The ASF licenses this file to You under the Apache License, Version 2.0 + (the "License"); you may not use this file except in compliance with + the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +//// + +content from _src/docs/asciidoc/subdir/_c.adoc_. diff --git a/content/Apache/Wayang/quickstart-guide/src/main/java/apache-wayang/StringUtils.java b/content/Apache/Wayang/quickstart-guide/src/main/java/apache-wayang/StringUtils.java new file mode 100644 index 00000000..c9dcd814 --- /dev/null +++ b/content/Apache/Wayang/quickstart-guide/src/main/java/apache-wayang/StringUtils.java @@ -0,0 +1,27 @@ +/* + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. 
+*/ +package apachewayang; + +public class StringUtils { + // tag::contains[] + public boolean contains(String haystack, String needle) { + return haystack.contains(needle); + } + // end::contains[] +} diff --git a/content/Apache/Wayang/quickstart-guide/src/main/resources/images/toddy.jpg b/content/Apache/Wayang/quickstart-guide/src/main/resources/images/toddy.jpg new file mode 100644 index 00000000..9db0e725 Binary files /dev/null and b/content/Apache/Wayang/quickstart-guide/src/main/resources/images/toddy.jpg differ diff --git a/content/Apache/pom.xml b/content/Apache/pom.xml index f93556b5..fb976811 100644 --- a/content/Apache/pom.xml +++ b/content/Apache/pom.xml @@ -48,6 +48,7 @@ Samza Spark Training + Wayang ZooKeeper diff --git a/reports/DRAFT-podling-report-AUGUST-2023.md b/reports/DRAFT-podling-report-AUGUST-2023.md new file mode 100644 index 00000000..42a9ae2a --- /dev/null +++ b/reports/DRAFT-podling-report-AUGUST-2023.md @@ -0,0 +1,120 @@ +August 2023 + +Training + +The Training project aims to develop resources which can be used for training purposes in various media formats, languages and for various Apache and non-Apache target projects. + +Training has been incubating since 2019-02-21. + +In June 2023, we started adjusting the project goals. Besides a pure focus on content creation tools we have identified the need for two additional aspects: + +(1) For ASF processes some documentation exists, e.g. https://community.apache.org/contributor-ladder, but the ASF member journey is still a challenge for many new members, especially those with less coding experience.  +As a solution it has been suggested to provide training material for the following problems:  +* You've just graduated to a TLP. What are the challenges you will now face? +* New committer onboarding +* New PMC member onboarding +* New PMC Chair onboarding +* New board member onboarding + +(2) Creation of some general recommendations/guidelines for: + * managing/validation of training material contributions of arbitrary ASF projects has been propsed. + +We expand our focus now on the following aspects: + +We have to work on strategies to increase the participation and the visibility of the project and its goals. + +Besides requests for contributions from individual contributors from other Apache projects we want to show what the Apache Training project can offer to those projects. + +We should build on the assumption that synergy and network effects can help us to increase the community around training and skill management topics. + + + + +Three most important unfinished issues to address before graduating: + +Improve the community participation (other Apache projects should actively be linked to this project, since they "have the content") +(Re)define the short-, mid-, and long-term goals of the project. +Select an approach to handle non coding / non document related tasks (e.g., community related activities on events) +Create a strategy how this projects contributes to or is supported by other Apache projects +Create a tutorial on how the tools provided by this project can be used to create own content +Review current policies around contribution review and releases to find a pragmatic compromise + +Are there any issues that the IPMC or ASF Board need to be aware of? + +The Training Project needs more contribution from other Apache projects, hence a clear communication strategy for interlinking existing projects with the Apache Training project is needed. 
+ +We need feedback from other Apache projects on the presentations and trainings developed till now. + +In order to improve this collaboration we suggest to investigate the status of other project’s documentation in a systematic approach, especially with an eye on available training related material. + + + +How has the community developed since the last report? + + - no changes + + + + +How has the project developed since the last report? + + - Initial discussion regarding usage of content created/delivered by LLMs has been started. + - In July, a first documentation inspection and fact extraction tool (Python script) has been provided. + - A pull request is open: https://github.com/apache/incubator-training/pull/87. + + + + +How would you assess the podling's maturity? + +There are currently some issues with this podling that we are working to address, we believe that the issues are not yet critical and can be turned around from within the community but want to make the board aware of this early on. + + - Initial setup oft he project’s structure: Goals and strategy needs refinement. + Working towards first release: What type of artefacts to release is not clear, due to unclear goals and strategy. +  Community building: The community exists of a very small number of people but is more or less invisible, as of today (April 2023). +  Nearing graduation: Currently, we are still pretty far away from graduation, but we get closer. + + + + +When were the last committers or PPMC members elected? + + - November 2020 + + + + +Have your mentors been helpful and responsive? + + - Mentors have been responsive and helpful when reached out to. + + + + +Is the PPMC managing the podling's brand / trademarks? + + - The name search process has not yet been started, as there is still time to do that if and when graduation draws nearer. + + - The PPMC is actively monitoring usage of the Podlings current name on other sites. + + - The Podling is not directly affiliating with any sponsors donating content and presenting an independent image on the webpage. + + + + +Signed-off-by: + + - (training) Craig Russell + Comments:  + + + - (training) Christofer Dutz + Comments: + + + - (training) Justin Mclean + Comments: + + + - (training) Lars Francke + Comments: diff --git a/tools/repo-doc-analysis/.DS_Store b/tools/repo-doc-analysis/.DS_Store new file mode 100644 index 00000000..b2a6b3e6 Binary files /dev/null and b/tools/repo-doc-analysis/.DS_Store differ diff --git a/tools/repo-doc-analysis/bootstrap.sh b/tools/repo-doc-analysis/bootstrap.sh new file mode 100755 index 00000000..b85af2c7 --- /dev/null +++ b/tools/repo-doc-analysis/bootstrap.sh @@ -0,0 +1,24 @@ +#!/bin/bash + +source env.sh + +pip install langchain==0.0.123 # https://github.com/hwchase17/langchain/releases +pip install openai +pip install faiss-cpu +pip install tiktoken + +create_directory_if_not_exists() { + local directory_name=$1 + + if [ ! -d "$directory_name" ]; then + mkdir "$directory_name" + echo "Directory '$directory_name' created successfully." + else + echo "Directory '$directory_name' already exists." 
+ fi +} + +create_directory_if_not_exists "$DOC_INSPECTION_WORKSPACE" +create_directory_if_not_exists "$DOC_INSPECTION_DATASTORE" + +./scripts/bootstrap-data.sh \ No newline at end of file diff --git a/tools/repo-doc-analysis/env.sh b/tools/repo-doc-analysis/env.sh new file mode 100644 index 00000000..92ee15d4 --- /dev/null +++ b/tools/repo-doc-analysis/env.sh @@ -0,0 +1,3 @@ +export DOC_INSPECTION_WORKSPACE=~/doc-inspection-base +export DOC_INSPECTION_DATASTORE=~/doc-inspection-store + diff --git a/tools/repo-doc-analysis/scripts/bootstrap-data.sh b/tools/repo-doc-analysis/scripts/bootstrap-data.sh new file mode 100755 index 00000000..4cf98cc0 --- /dev/null +++ b/tools/repo-doc-analysis/scripts/bootstrap-data.sh @@ -0,0 +1,24 @@ +#!/bin/bash +# For development and experiments we select a small number of ASF projects. +#----------------------------------------------------------------------------------------------------------------------- + +clone_or_pull_github_repo() { + local repository_url=$1 + local directory_name=$2 + + if [ -d "$DOC_INSPECTION_WORKSPACE/$directory_name" ]; then + echo "Directory '$directory_name' already exists. Performing a git pull." + cd "$DOC_INSPECTION_WORKSPACE/$directory_name" || exit + git pull + else + echo "Directory '$DOC_INSPECTION_WORKSPACE/$directory_name' does not exist. Performing a git clone." + git clone "$repository_url" "$DOC_INSPECTION_WORKSPACE/$directory_name" + cd "$DOC_INSPECTION_WORKSPACE/$directory_name" || exit + git fetch + fi +} + +clone_or_pull_github_repo "https://github.com/apache/incubator-wayang" "incubator-wayang" + +clone_or_pull_github_repo "https://github.com/apache/kafka" "kafka" + diff --git a/tools/repo-doc-analysis/scripts/corpus_analyser_qa_langchain.py b/tools/repo-doc-analysis/scripts/corpus_analyser_qa_langchain.py new file mode 100755 index 00000000..95984b20 --- /dev/null +++ b/tools/repo-doc-analysis/scripts/corpus_analyser_qa_langchain.py @@ -0,0 +1,259 @@ +# -*- coding: utf-8 -*- + +# +# This script is inspired by the work of Quy Tang: +# https://medium.com/singapore-gds/integrating-chatgpt-with-internal-knowledge-base-and-question-answer-platform-36a3283d6334 +# + +# +# Run the script like this: +# +# (1) Provide an OPENAI_API_KEY in your command or as ENV variable. +# +# source env.sh; export OPENAI_API_KEY="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"; python corpus_analyser_qa_langchain.py +# +# (2) Select the project you want to analyse. (Please see folder docs-main for more details to prep the data!) +import project_metadata.kafka as pmd +import project_metadata.incubator_wayang as pmd +# +# (3) Select the question-set you want to work with. 
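+#     (The active question set is selected further below in this script via `QUESTION_SET = qs.queryies2`.)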
+import question_sets.question_sets as qs + +import os +from getpass import getpass + +import os +import pathlib +import re + +from langchain.docstore.document import Document +from langchain.document_loaders import TextLoader + +from langchain.text_splitter import CharacterTextSplitter +from langchain.embeddings.openai import OpenAIEmbeddings +from langchain.vectorstores import FAISS + +def get_file_size(file_path): + if os.path.exists(file_path): + file_size = os.path.getsize(file_path) + return file_size + else: + return -1 # Return -1 if the file is not found + +name_filter = "**/*.md" +separator = "\n### " # This separator assumes Markdown docs from the repo uses ### as logical main header most of the time +chunk_size_limit = 2500 +max_chunk_overlap = 100 + +repo_path = pathlib.Path(os.path.join(pmd.DOCS_FOLDER, pmd.REPO_DOCUMENTS_PATH)) +document_files = list(repo_path.glob(name_filter)) + +print( f"*** FILENAMES of indexed documents> in REPO_PATH: {repo_path}") +for fn in document_files: + size = get_file_size( fn ) + if size > chunk_size_limit: + p = "*" + print( f"{p}{size} {fn}" ) + else: + p = " " + +def convert_path_to_doc_url(doc_path): + # Convert from relative path to actual document url + return re.sub(f"{pmd.DOCS_FOLDER}/{pmd.REPO_DOCUMENTS_PATH}/(.*)\.[\w\d]+", f"{pmd.DOCUMENT_BASE_URL}/\\1", str(doc_path)) + +documents = [ + Document( + page_content=open(file, "r").read(), + size=get_file_size( file ), + metadata={"source": convert_path_to_doc_url(file)} + ) + for file in document_files +] + +print( "\n<# of documents>") +print( len(documents) ) +print() + + +text_splitter = CharacterTextSplitter(separator=separator, chunk_size=chunk_size_limit, chunk_overlap=max_chunk_overlap) +split_docs = text_splitter.split_documents(documents) + +print( len(split_docs) ) + +for doc in split_docs: + print( type(doc) ) + print( doc ) + print() + +exit(-1) + +import tiktoken + +# create a GPT-4 encoder instance +enc = tiktoken.encoding_for_model("gpt-4") + +total_word_count = sum(len(doc.page_content.split()) for doc in split_docs) +total_token_count = sum(len(enc.encode(doc.page_content)) for doc in split_docs) + +print(f"\nTotal word count: {total_word_count}") +print(f"Estimated tokens: {total_token_count}") +print(f"Estimated cost of embedding: ${total_token_count * 0.0004 / 1000}") +print() + +### Create Vector Store using OpenAI + +key = os.environ["OPENAI_API_KEY"] +if key is None: + os.environ["OPENAI_API_KEY"] = getpass("Paste your OpenAI API key here and hit enter:") +else: + print( "> Use OpenAI API-key from ENV variable OPENAI_API_KEY.") + +embeddings = OpenAIEmbeddings() +vector_store = FAISS.from_documents(split_docs, embeddings) + +print( f"> Save vector store to files in {pmd.DATA_STORE_DIR} for reuse." ) +vector_store.save_local(pmd.DATA_STORE_DIR) + +from langchain.embeddings.openai import OpenAIEmbeddings +from langchain.vectorstores import FAISS + +if os.path.exists(pmd.DATA_STORE_DIR): + print(f"*** Loading index files. *** \n> Try to get the index.faiss and index.pkl files from {pmd.DATA_STORE_DIR} directory...") + vector_store = FAISS.load_local( + pmd.DATA_STORE_DIR, + OpenAIEmbeddings() + ) + print( "> Done.") +else: + print(f"!!! Missing files. !!! \n> Upload index.faiss and index.pkl files to {pmd.DATA_STORE_DIR} directory first!") + exit(-1) + + + +print( "> Query using the vector store with ChatGPT integration." ) +print( "> Set up the chat model and specific prompt." 
) + +from langchain.prompts.chat import ( + ChatPromptTemplate, + SystemMessagePromptTemplate, + HumanMessagePromptTemplate, +) + +system_template="""Use the following pieces of context to answer the users question. +Take note of the sources and include them in the answer in the format: "SOURCES: source1 source2", use "SOURCES" in capital letters regardless of the number of sources. +If you don't know the answer, just say that "I don't know", don't try to make up an answer. +---------------- +{summaries}""" +messages = [ + SystemMessagePromptTemplate.from_template(system_template), + HumanMessagePromptTemplate.from_template("{question}") +] +prompt = ChatPromptTemplate.from_messages(messages) + +from langchain.chat_models import ChatOpenAI +from langchain.chains import RetrievalQAWithSourcesChain + +chain_type_kwargs = {"prompt": prompt} +llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0, max_tokens=1500) # Modify model_name if you have access to GPT-4 +chain = RetrievalQAWithSourcesChain.from_chain_type( + llm=llm, + chain_type="stuff", + retriever=vector_store.as_retriever(), + return_source_documents=True, + chain_type_kwargs=chain_type_kwargs +) + +print( "> Prompt is defined.") + +def print_problem(query,ex,f): + output_text = f"""\n-PROBLEM------------------------------------------------------------------------------ + ### Question: + -------------------------------------------------------------------------------\n + {query} + -------------------------------------------------------------------------------\n + {ex} + """ + f.write( output_text ) + # print( output_text ) + +def print_result(result, query, f): + output_text = """ +## Question: +{0} + +## Answer: +{1} + +### Sources: +{2} + +#### All relevant sources: ++ {3} +""".format( + query, + result['answer'], + result['sources'], + '\n+ '.join(list(set([doc.metadata['source'] for doc in result['source_documents']]))), + ) + + f.write( output_text ) + print( output_text ) + +import logging +logging.getLogger("openai").setLevel(logging.DEBUG) # logging.INFO or logging.DEBUG + +QUESTION_SET = qs.queryies2 +print( f"> Use question set: [{QUESTION_SET[1]}]") + +import os + +directory = f"./../../target/reports/{pmd.PROJECT_FOLDER}" + +# Check if the directory exists +if not os.path.exists(directory): + # If it doesn't exist, create it + os.makedirs(directory) + print( f"> created report directory: {directory}") + +fa = open( f"{directory}/a_answers_{QUESTION_SET[2]}.md", "w") +fp = open( f"{directory}/b_problems_{QUESTION_SET[2]}.md", "w") + +z=0 +e=0 + +for q in QUESTION_SET[0]: + + try: + result = chain(q) + print_result(result, q, fa) + z=z+1 + except Exception as ex: + print_problem(q, ex, fp) + e=e+1 + +fa.close() +fp.close() + +print( "Processing summary:") +print( "===================") +print( f"# {z} PROMPTS processed." ) +print( f"# {e} PROBLEMS observed." 
) +print() +print( f" : {directory}") +print() +print( f" in REPO_PATH : {repo_path}") +print( document_files ) +print() +print( f"\n<# of indexed documents> : {len(documents) }") +print() + + + + + + + + + + + diff --git a/tools/repo-doc-analysis/scripts/project_metadata/__pycache__/incubator_wayang.cpython-311.pyc b/tools/repo-doc-analysis/scripts/project_metadata/__pycache__/incubator_wayang.cpython-311.pyc new file mode 100644 index 00000000..54c4601a Binary files /dev/null and b/tools/repo-doc-analysis/scripts/project_metadata/__pycache__/incubator_wayang.cpython-311.pyc differ diff --git a/tools/repo-doc-analysis/scripts/project_metadata/__pycache__/kafka.cpython-311.pyc b/tools/repo-doc-analysis/scripts/project_metadata/__pycache__/kafka.cpython-311.pyc new file mode 100644 index 00000000..71dd2902 Binary files /dev/null and b/tools/repo-doc-analysis/scripts/project_metadata/__pycache__/kafka.cpython-311.pyc differ diff --git a/tools/repo-doc-analysis/scripts/project_metadata/incubator_wayang.py b/tools/repo-doc-analysis/scripts/project_metadata/incubator_wayang.py new file mode 100644 index 00000000..125935ff --- /dev/null +++ b/tools/repo-doc-analysis/scripts/project_metadata/incubator_wayang.py @@ -0,0 +1,27 @@ +import os + +doc_inspection_datastore = os.getenv('DOC_INSPECTION_DATASTORE') + +if doc_inspection_datastore: + print(f"Value of DOC_INSPECTION_DATASTORE is: {doc_inspection_datastore}") +else: + print("DOC_INSPECTION_DATASTORE is not set.") + exit(-1) + +doc_inspection_workspace = os.getenv('DOC_INSPECTION_WORKSPACE') + +if doc_inspection_workspace: + print(f"Value of DOC_INSPECTION_WORKSPACE is: {doc_inspection_workspace}") +else: + print("DOC_INSPECTION_WORKSPACE is not set.") + exit(-1) + +PROJECT_FOLDER = "incubator-wayang" +REPO_URL = "https://github.com/apache/incubator-wayang" # Source URL +DOCS_FOLDER = f"{doc_inspection_workspace}/incubator-wayang/" # Folder to check out to +REPO_DOCUMENTS_PATH = "wayang-docs/" # Set to "" to index the whole REPOSITORY +DOCUMENT_BASE_URL = "https://github.com/apache/incubator-wayang" # Actual URL +DATA_STORE_DIR = f"{doc_inspection_datastore}/incubator-wayang" + +print( "> Inspect incubator-wayang repository ... 
") + diff --git a/tools/repo-doc-analysis/scripts/project_metadata/kafka.py b/tools/repo-doc-analysis/scripts/project_metadata/kafka.py new file mode 100644 index 00000000..b8ba2244 --- /dev/null +++ b/tools/repo-doc-analysis/scripts/project_metadata/kafka.py @@ -0,0 +1,26 @@ +import os + +doc_inspection_datastore = os.getenv('DOC_INSPECTION_DATASTORE') + +if doc_inspection_datastore: + print(f"Value of DOC_INSPECTION_DATASTORE is: {doc_inspection_datastore}") +else: + print("DOC_INSPECTION_DATASTORE is not set.") + exit(-1) + +doc_inspection_workspace = os.getenv('DOC_INSPECTION_WORKSPACE') + +if doc_inspection_workspace: + print(f"Value of DOC_INSPECTION_WORKSPACE is: {doc_inspection_workspace}") +else: + print("DOC_INSPECTION_WORKSPACE is not set.") + exit(-1) + +PROJECT_FOLDER = "kafka" +REPO_URL = "https://github.com/apache/kafka" # Source URL +DOCS_FOLDER = f"{doc_inspection_workspace}/kafka/" # Folder to check out to +REPO_DOCUMENTS_PATH = "" # Set to "" to index the whole data folder +DOCUMENT_BASE_URL = "https://github.com/apache/kafka" # Actual URL +DATA_STORE_DIR = f"{doc_inspection_datastore}/kafka" + +print( "> Inspect Kafka repository ...") diff --git a/tools/repo-doc-analysis/scripts/question_sets/__pycache__/question_sets.cpython-311.pyc b/tools/repo-doc-analysis/scripts/question_sets/__pycache__/question_sets.cpython-311.pyc new file mode 100644 index 00000000..e8b0b752 Binary files /dev/null and b/tools/repo-doc-analysis/scripts/question_sets/__pycache__/question_sets.cpython-311.pyc differ diff --git a/tools/repo-doc-analysis/scripts/question_sets/question_sets.py b/tools/repo-doc-analysis/scripts/question_sets/question_sets.py new file mode 100644 index 00000000..5c9f0acd --- /dev/null +++ b/tools/repo-doc-analysis/scripts/question_sets/question_sets.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- + +# +# This module defines the question-sets used to create the PROMPTS for our document analysis automation. +# + +queryies1 = ([ + "What is projects title?, Clearly state the name of the project.", + "What is the project description? Provide a concise overview of what the project does and its main features or purpose.", + "How do I install the system? Please provide instructions on how to install and set up the project, including any dependencies or requirements.", + "How do I use the project? Explain how to use the software, including any command-line options, configuration settings, or examples of common usage scenarios.", + "Where do I get the documentation from? Please provide links or instructions for accessing the project's documentation, such as API references, user guides, or tutorials.", + "How can I contribute? Specify guidelines and instructions for contributing to the project, including information about how to submit bug reports, feature requests, or pull requests.", + "What license is used? State the license under which the project is distributed and any relevant copyright or attribution notices.", + "How does the roadmap look like? Outline the project's future plans, upcoming features, or known issues.", + "Who are the authors? List the names or usernames of the main contributors to the project.", + "Acknowledgments: Express gratitude to individuals, organizations, or resources that have contributed to the project or influenced its development." + ], "Nr. 
001", "qs-001" ) + +queryies2 = ([ + "What is the projects title?", + "What is the projects description?", + "How do I install the system?", + "How do I use the project?", + "Where do I get the documentation from?", + "How can I contribute?", + "What license is used?", + "How does the roadmap look like?", + "Who are the authors?" + ], "Nr. 002", "qs-002" ) \ No newline at end of file