diff --git a/.gitignore b/.gitignore index 5eaef8b..72ff104 100644 --- a/.gitignore +++ b/.gitignore @@ -21,3 +21,9 @@ msvc/**/Release target/bin/linux/x64/Release/* target/lib/linux/x64/Release/* target/build/linux/x64/Release/* + +*.config +*.creator +*.creator.user +*.files +*.includes diff --git a/Makefile b/Makefile index 2e1a969..0c41cc5 100755 --- a/Makefile +++ b/Makefile @@ -1,129 +1,11 @@ -SRC = src/main -INCLUDE = include/main -TARGET = target -BUILD = $(TARGET)/build -CC = gcc -CXX = g++ - -CXXFLAGS = -std=c++11 -I$(INCLUDE) -DOS_LINUX -g -LDFALGS = -lpthread -Ldeps/meshy/target -lmeshy - -COMMON_OBJECTS = \ - $(BUILD)/DataPackage.o \ - $(BUILD)/OutputCollector.o \ - $(BUILD)/BoltExecutor.o \ - $(BUILD)/BoltOutputCollector.o \ - $(BUILD)/CommandDispatcher.o \ - $(BUILD)/MessageLoop.o \ - $(BUILD)/NimbusCommander.o \ - $(BUILD)/SupervisorCommander.o \ - $(BUILD)/SpoutExecutor.o \ - $(BUILD)/SpoutOutputCollector.o \ - $(BUILD)/SimpleTopology.o \ - $(BUILD)/TopologyBuilder.o \ - -NIMBUS_OBJECTS = $(BUILD)/NimbusLauncher.o - -SUPERVISOR_OBJECTS = $(BUILD)/SupervisorLauncher.o - -all: $(TARGET)/nimbus $(TARGET)/supervisor +all: + cd deps/meshy/target/build/linux/x64/Release;make + cd target/build/linux/x64/Release;make clean: - rm -rf $(TARGET)/* - mkdir $(BUILD) - -$(TARGET)/nimbus: $(COMMON_OBJECTS) $(NIMBUS_OBJECTS) - $(CXX) -o $@ $(COMMON_OBJECTS) $(NIMBUS_OBJECTS) $(LDFALGS) - -$(TARGET)/supervisor: $(COMMON_OBJECTS) $(SUPERVISOR_OBJECTS) - $(CXX) -o $@ $(COMMON_OBJECTS) $(SUPERVISOR_OBJECTS) $(LDFALGS) - -$(BUILD)/DataPackage.o: $(SRC)/hurricane/base/DataPackage.cpp \ - $(INCLUDE)/hurricane/base/DataPackage.h - $(CXX) $(CXXFLAGS) -c -o $@ $< - -$(BUILD)/OutputCollector.o: $(SRC)/hurricane/base/OutputCollector.cpp \ - $(INCLUDE)/hurricane/base/OutputCollector.h \ - $(INCLUDE)/hurricane/topology/ITopology.h - $(CXX) $(CXXFLAGS) -c -o $@ $< - -$(BUILD)/BoltExecutor.o: $(SRC)/hurricane/bolt/BoltExecutor.cpp \ - $(INCLUDE)/hurricane/bolt/BoltExecutor.h \ - $(INCLUDE)/hurricane/bolt/BoltMessage.h \ - $(INCLUDE)/hurricane/message/MessageLoop.h \ - $(INCLUDE)/hurricane/base/OutputCollector.h \ - $(INCLUDE)/hurricane/message/SupervisorCommander.h - $(CXX) $(CXXFLAGS) -c -o $@ $< - -$(BUILD)/BoltOutputCollector.o: $(SRC)/hurricane/bolt/BoltOutputCollector.cpp \ - $(INCLUDE)/hurricane/bolt/BoltOutputCollector.h \ - $(INCLUDE)/hurricane/bolt/BoltExecutor.h - $(CXX) $(CXXFLAGS) -c -o $@ $< - -$(BUILD)/CommandDispatcher.o: $(SRC)/hurricane/message/CommandDispatcher.cpp \ - $(INCLUDE)/hurricane/message/CommandDispatcher.h - $(CXX) $(CXXFLAGS) -c -o $@ $< - -$(BUILD)/MessageLoop.o: $(SRC)/hurricane/message/MessageLoop.cpp \ - $(INCLUDE)/hurricane/message/MessageLoop.h \ - $(INCLUDE)/hurricane/message/Message.h - $(CXX) $(CXXFLAGS) -c -o $@ $< - -$(BUILD)/NimbusCommander.o: $(SRC)/hurricane/message/NimbusCommander.cpp \ - $(INCLUDE)/hurricane/message/SupervisorCommander.h \ - $(INCLUDE)/hurricane/base/ByteArray.h \ - $(INCLUDE)/hurricane/base/DataPackage.h \ - $(INCLUDE)/hurricane/message/Command.h \ - $(INCLUDE)/hurricane/message/NimbusCommander.h - $(CXX) $(CXXFLAGS) -c -o $@ $< - -$(BUILD)/SupervisorCommander.o: $(SRC)/hurricane/message/SupervisorCommander.cpp \ - $(INCLUDE)/hurricane/message/SupervisorCommander.h \ - $(INCLUDE)/hurricane/base/ByteArray.h \ - $(INCLUDE)/hurricane/base/DataPackage.h \ - $(INCLUDE)/hurricane/message/Command.h - $(CXX) $(CXXFLAGS) -c -o $@ $< - -$(BUILD)/SpoutExecutor.o: $(SRC)/hurricane/spout/SpoutExecutor.cpp \ - $(INCLUDE)/hurricane/spout/SpoutExecutor.h \ 
- $(INCLUDE)/hurricane/base/OutputCollector.h \ - $(INCLUDE)/hurricane/message/SupervisorCommander.h \ - $(INCLUDE)/hurricane/spout/SpoutOutputCollector.h - $(CXX) $(CXXFLAGS) -c -o $@ $< - -$(BUILD)/SpoutOutputCollector.o: $(SRC)/hurricane/spout/SpoutOutputCollector.cpp \ - $(INCLUDE)/SpoutOutputCollector.h \ - $(INCLUDE)/hurricane/spout/SpoutExecutor.h - $(CXX) $(CXXFLAGS) -c -o $@ $< - -$(BUILD)/SimpleTopology.o: $(SRC)/hurricane/topology/SimpleTopology.cpp \ - $(INCLUDE)/hurricane/topology/SimpleTopology.h \ - $(INCLUDE)/hurricane/spout/ISpout.h \ - $(INCLUDE)/hurricane/bolt/IBolt.h - $(CXX) $(CXXFLAGS) -c -o $@ $< - -$(BUILD)/TopologyBuilder.o: $(SRC)/hurricane/topology/TopologyBuilder.cpp \ - $(INCLUDE)/hurricane/topology/TopologyBuilder.h \ - $(INCLUDE)/hurricane/spout/ISpout.h \ - $(INCLUDE)/hurricane/bolt/IBolt.h - $(CXX) $(CXXFLAGS) -c -o $@ $< - -$(BUILD)/NimbusLauncher.o: $(SRC)/hurricane/NimbusLauncher.cpp \ - $(INCLUDE)/hurricane/base/NetAddress.h \ - $(INCLUDE)/hurricane/base/ByteArray.h \ - $(INCLUDE)/hurricane/base/DataPackage.h \ - $(INCLUDE)/hurricane/message/CommandDispatcher.h \ - $(INCLUDE)/hurricane/message/NimbusCommander.h \ - $(INCLUDE)/hurricane/base/Node.h - $(CXX) $(CXXFLAGS) -c -o $@ $< - -$(BUILD)/SupervisorLauncher.o: $(SRC)/hurricane/SupervisorLauncher.cpp \ - $(INCLUDE)/hurricane/base/NetAddress.h \ - $(INCLUDE)/hurricane/base/ByteArray.h \ - $(INCLUDE)/hurricane/base/DataPackage.h \ - $(INCLUDE)/hurricane/base/Value.h \ - $(INCLUDE)/hurricane/base/Variant.h \ - $(INCLUDE)/hurricane/message/SupervisorCommander.h \ - $(INCLUDE)/hurricane/message/CommandDispatcher.h - $(CXX) $(CXXFLAGS) -c -o $@ $< + cd target/build/linux/x64/Release;make clean + cd deps/meshy/target/build/linux/x64/Release;make clean +install: + cd deps/meshy/target/build/linux/x64/Release;make install + cd target/build/linux/x64/Release;make install diff --git a/deps/kake/.gitignore b/deps/kake/.gitignore deleted file mode 100755 index a0c7e99..0000000 --- a/deps/kake/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -test -**/__pycache__ -**/*.swp -sample/*/target -sample/*/project/*/target diff --git a/deps/kake/LICENSE b/deps/kake/LICENSE old mode 100755 new mode 100644 index 36d3f27..a0e961c --- a/deps/kake/LICENSE +++ b/deps/kake/LICENSE @@ -1,339 +1,20 @@ - GNU GENERAL PUBLIC LICENSE - Version 2, June 1991 - - Copyright (C) 1989, 1991 Free Software Foundation, Inc., [http://fsf.org/] - 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The licenses for most software are designed to take away your -freedom to share and change it. By contrast, the GNU General Public -License is intended to guarantee your freedom to share and change free -software--to make sure the software is free for all its users. This -General Public License applies to most of the Free Software -Foundation's software and to any other program whose authors commit to -using it. (Some other Free Software Foundation software is covered by -the GNU Lesser General Public License instead.) You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. 
Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -this service if you wish), that you receive source code or can get it -if you want it, that you can change the software or use pieces of it -in new free programs; and that you know you can do these things. - - To protect your rights, we need to make restrictions that forbid -anyone to deny you these rights or to ask you to surrender the rights. -These restrictions translate to certain responsibilities for you if you -distribute copies of the software, or if you modify it. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must give the recipients all the rights that -you have. You must make sure that they, too, receive or can get the -source code. And you must show them these terms so they know their -rights. - - We protect your rights with two steps: (1) copyright the software, and -(2) offer you this license which gives you legal permission to copy, -distribute and/or modify the software. - - Also, for each author's protection and ours, we want to make certain -that everyone understands that there is no warranty for this free -software. If the software is modified by someone else and passed on, we -want its recipients to know that what they have is not the original, so -that any problems introduced by others will not reflect on the original -authors' reputations. - - Finally, any free program is threatened constantly by software -patents. We wish to avoid the danger that redistributors of a free -program will individually obtain patent licenses, in effect making the -program proprietary. To prevent this, we have made it clear that any -patent must be licensed for everyone's free use or not licensed at all. - - The precise terms and conditions for copying, distribution and -modification follow. - - GNU GENERAL PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. This License applies to any program or other work which contains -a notice placed by the copyright holder saying it may be distributed -under the terms of this General Public License. The "Program", below, -refers to any such program or work, and a "work based on the Program" -means either the Program or any derivative work under copyright law: -that is to say, a work containing the Program or a portion of it, -either verbatim or with modifications and/or translated into another -language. (Hereinafter, translation is included without limitation in -the term "modification".) Each licensee is addressed as "you". - -Activities other than copying, distribution and modification are not -covered by this License; they are outside its scope. The act of -running the Program is not restricted, and the output from the Program -is covered only if its contents constitute a work based on the -Program (independent of having been made by running the Program). -Whether that is true depends on what the Program does. - - 1. You may copy and distribute verbatim copies of the Program's -source code as you receive it, in any medium, provided that you -conspicuously and appropriately publish on each copy an appropriate -copyright notice and disclaimer of warranty; keep intact all the -notices that refer to this License and to the absence of any warranty; -and give any other recipients of the Program a copy of this License -along with the Program. 
- -You may charge a fee for the physical act of transferring a copy, and -you may at your option offer warranty protection in exchange for a fee. - - 2. You may modify your copy or copies of the Program or any portion -of it, thus forming a work based on the Program, and copy and -distribute such modifications or work under the terms of Section 1 -above, provided that you also meet all of these conditions: - - a) You must cause the modified files to carry prominent notices - stating that you changed the files and the date of any change. - - b) You must cause any work that you distribute or publish, that in - whole or in part contains or is derived from the Program or any - part thereof, to be licensed as a whole at no charge to all third - parties under the terms of this License. - - c) If the modified program normally reads commands interactively - when run, you must cause it, when started running for such - interactive use in the most ordinary way, to print or display an - announcement including an appropriate copyright notice and a - notice that there is no warranty (or else, saying that you provide - a warranty) and that users may redistribute the program under - these conditions, and telling the user how to view a copy of this - License. (Exception: if the Program itself is interactive but - does not normally print such an announcement, your work based on - the Program is not required to print an announcement.) - -These requirements apply to the modified work as a whole. If -identifiable sections of that work are not derived from the Program, -and can be reasonably considered independent and separate works in -themselves, then this License, and its terms, do not apply to those -sections when you distribute them as separate works. But when you -distribute the same sections as part of a whole which is a work based -on the Program, the distribution of the whole must be on the terms of -this License, whose permissions for other licensees extend to the -entire whole, and thus to each and every part regardless of who wrote it. - -Thus, it is not the intent of this section to claim rights or contest -your rights to work written entirely by you; rather, the intent is to -exercise the right to control the distribution of derivative or -collective works based on the Program. - -In addition, mere aggregation of another work not based on the Program -with the Program (or with a work based on the Program) on a volume of -a storage or distribution medium does not bring the other work under -the scope of this License. - - 3. You may copy and distribute the Program (or a work based on it, -under Section 2) in object code or executable form under the terms of -Sections 1 and 2 above provided that you also do one of the following: - - a) Accompany it with the complete corresponding machine-readable - source code, which must be distributed under the terms of Sections - 1 and 2 above on a medium customarily used for software interchange; or, - - b) Accompany it with a written offer, valid for at least three - years, to give any third party, for a charge no more than your - cost of physically performing source distribution, a complete - machine-readable copy of the corresponding source code, to be - distributed under the terms of Sections 1 and 2 above on a medium - customarily used for software interchange; or, - - c) Accompany it with the information you received as to the offer - to distribute corresponding source code. 
(This alternative is - allowed only for noncommercial distribution and only if you - received the program in object code or executable form with such - an offer, in accord with Subsection b above.) - -The source code for a work means the preferred form of the work for -making modifications to it. For an executable work, complete source -code means all the source code for all modules it contains, plus any -associated interface definition files, plus the scripts used to -control compilation and installation of the executable. However, as a -special exception, the source code distributed need not include -anything that is normally distributed (in either source or binary -form) with the major components (compiler, kernel, and so on) of the -operating system on which the executable runs, unless that component -itself accompanies the executable. - -If distribution of executable or object code is made by offering -access to copy from a designated place, then offering equivalent -access to copy the source code from the same place counts as -distribution of the source code, even though third parties are not -compelled to copy the source along with the object code. - - 4. You may not copy, modify, sublicense, or distribute the Program -except as expressly provided under this License. Any attempt -otherwise to copy, modify, sublicense or distribute the Program is -void, and will automatically terminate your rights under this License. -However, parties who have received copies, or rights, from you under -this License will not have their licenses terminated so long as such -parties remain in full compliance. - - 5. You are not required to accept this License, since you have not -signed it. However, nothing else grants you permission to modify or -distribute the Program or its derivative works. These actions are -prohibited by law if you do not accept this License. Therefore, by -modifying or distributing the Program (or any work based on the -Program), you indicate your acceptance of this License to do so, and -all its terms and conditions for copying, distributing or modifying -the Program or works based on it. - - 6. Each time you redistribute the Program (or any work based on the -Program), the recipient automatically receives a license from the -original licensor to copy, distribute or modify the Program subject to -these terms and conditions. You may not impose any further -restrictions on the recipients' exercise of the rights granted herein. -You are not responsible for enforcing compliance by third parties to -this License. - - 7. If, as a consequence of a court judgment or allegation of patent -infringement or for any other reason (not limited to patent issues), -conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot -distribute so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you -may not distribute the Program at all. For example, if a patent -license would not permit royalty-free redistribution of the Program by -all those who receive copies directly or indirectly through you, then -the only way you could satisfy both it and this License would be to -refrain entirely from distribution of the Program. 
- -If any portion of this section is held invalid or unenforceable under -any particular circumstance, the balance of the section is intended to -apply and the section as a whole is intended to apply in other -circumstances. - -It is not the purpose of this section to induce you to infringe any -patents or other property right claims or to contest validity of any -such claims; this section has the sole purpose of protecting the -integrity of the free software distribution system, which is -implemented by public license practices. Many people have made -generous contributions to the wide range of software distributed -through that system in reliance on consistent application of that -system; it is up to the author/donor to decide if he or she is willing -to distribute software through any other system and a licensee cannot -impose that choice. - -This section is intended to make thoroughly clear what is believed to -be a consequence of the rest of this License. - - 8. If the distribution and/or use of the Program is restricted in -certain countries either by patents or by copyrighted interfaces, the -original copyright holder who places the Program under this License -may add an explicit geographical distribution limitation excluding -those countries, so that distribution is permitted only in or among -countries not thus excluded. In such case, this License incorporates -the limitation as if written in the body of this License. - - 9. The Free Software Foundation may publish revised and/or new versions -of the General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - -Each version is given a distinguishing version number. If the Program -specifies a version number of this License which applies to it and "any -later version", you have the option of following the terms and conditions -either of that version or of any later version published by the Free -Software Foundation. If the Program does not specify a version number of -this License, you may choose any version ever published by the Free Software -Foundation. - - 10. If you wish to incorporate parts of the Program into other free -programs whose distribution conditions are different, write to the author -to ask for permission. For software which is copyrighted by the Free -Software Foundation, write to the Free Software Foundation; we sometimes -make exceptions for this. Our decision will be guided by the two goals -of preserving the free status of all derivatives of our free software and -of promoting the sharing and reuse of software generally. - - NO WARRANTY - - 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY -FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN -OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES -PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED -OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS -TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE -PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, -REPAIR OR CORRECTION. - - 12. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR -REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, -INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING -OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED -TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY -YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER -PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE -POSSIBILITY OF SUCH DAMAGES. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -convey the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - {description} - Copyright (C) 2013 kinuxroot - - This program is free software; you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation; either version 2 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License along - with this program; if not, write to the Free Software Foundation, Inc., - 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - -Also add information on how to contact you by electronic and paper mail. - -If the program is interactive, make it output a short notice like this -when it starts in an interactive mode: - - Gnomovision version 69, Copyright (C) year name of author - Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, the commands you use may -be called something other than `show w' and `show c'; they could even be -mouse-clicks or menu items--whatever suits your program. - -You should also get your employer (if you work as a programmer) or your -school, if any, to sign a "copyright disclaimer" for the program, if -necessary. Here is a sample; alter the names: - - Yoyodyne, Inc., hereby disclaims all copyright interest in the program - `Gnomovision' (which makes passes at compilers) written by James Hacker. - - {signature of Ty Coon}, 1 April 1989 - Ty Coon, President of Vice - -This General Public License does not permit incorporating your program into -proprietary programs. If your program is a subroutine library, you may -consider it more useful to permit linking proprietary applications with the -library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. 
\ No newline at end of file +The MIT License (MIT) + +Copyright (c) 2016 kinuxroot + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/kake/README.md b/deps/kake/README.md old mode 100755 new mode 100644 index 7d4a51f..5ccb0b0 --- a/deps/kake/README.md +++ b/deps/kake/README.md @@ -1,40 +1 @@ -#Kake Build System - -#### Brief Introduction to Kake -Kake is a building system following the the "convention over configuration" paradigm - -#### Dependencies -Like some of build systems (e.g. scons), Kake build system is written in python3 and using PyYAML library, thus we need to install Python, libyaml and PyYAML. - -Install the following 3rd-party packages if you are using Ubuntu: - - sudo apt-get install libyaml-0-2 libyaml-dev - sudo apt-get install python3 libpython3-dev python3-pip - -Then you may use pip to install PyYAML: - - sudo pip3 install PyYAML - -#### Install Kake -It's very simple to install Kake build system into your environment, you just have to clone it: -Assume you are going to install Kake build system into ~/apps: - - cd ~/apps - git clone https://git.oschina.net/kinuxroot/kake.git - -Then add one line in ~/.bashrc with your favorite text editor: - - export KAKE_HOME=~/apps/kake - -Finally, add the path kake/bin right after PATH: - - export PATH="${PATH}:${KAKE_HOME}/bin" - -#### Verify Installation -If you type in kake in a folder without Kakefile, it will output: - - Project file not exists. - -This indicates that Kake build system is ready to use in your environment, if you want to use it, just simply type: - - kake \ No newline at end of file +#kake diff --git a/deps/kake/bin/kake b/deps/kake/bin/kake deleted file mode 100755 index f5b653a..0000000 --- a/deps/kake/bin/kake +++ /dev/null @@ -1,8 +0,0 @@ -#! /bin/sh - -if [ -z "$KAKE_HOME" ]; then - echo "The environment variable KAKE_HOME is not set!" - echo "Please set KAKE_HOME before run kake" -else - python3 "${KAKE_HOME}/src/main.py" $* -fi diff --git a/deps/kake/bin/kake.cmd b/deps/kake/bin/kake.cmd deleted file mode 100755 index e955f60..0000000 --- a/deps/kake/bin/kake.cmd +++ /dev/null @@ -1,6 +0,0 @@ -@IF "%KAKE_HOME%"=="" ( - @echo The environment variable KAKE_HOME is not set! - @echo Please set KAKE_HOME before run kake -) ELSE ( - @python3 "%KAKE_HOME%\src\main.py" %* -) diff --git a/deps/kake/bin/pynewmod b/deps/kake/bin/pynewmod deleted file mode 100755 index 542b50f..0000000 --- a/deps/kake/bin/pynewmod +++ /dev/null @@ -1,7 +0,0 @@ -#! 
/bin/sh - -MODULE_PATH="$1" -PACKAGE_FLAG="__init__.py" - -mkdir "${MODULE_PATH}" -touch "${MODULE_PATH}/${PACKAGE_FLAG}" diff --git a/deps/kake/doc/conf/cpp.txt b/deps/kake/doc/conf/cpp.txt deleted file mode 100755 index 9c66456..0000000 --- a/deps/kake/doc/conf/cpp.txt +++ /dev/null @@ -1,25 +0,0 @@ -make: - configuration: - target: - type: - compiler: - c: - src_exts: - cc: - include_paths: - flags: - cpp: - src_exts: - cc: - include_paths: - flags: - inherit_c_include_path: - fpic: - linker: - autolink: - ld: - flags: - library_paths: - libraries: - archiver: - ar: diff --git a/deps/kake/kake b/deps/kake/kake new file mode 100755 index 0000000..2538b39 --- /dev/null +++ b/deps/kake/kake @@ -0,0 +1,5 @@ +#! /bin/bash + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + +node "${DIR}/main.js" $* diff --git a/deps/kake/lib/project.js b/deps/kake/lib/project.js new file mode 100644 index 0000000..2e9649a --- /dev/null +++ b/deps/kake/lib/project.js @@ -0,0 +1,548 @@ +'use strict'; + +let utils = { + fs: require('./util/fs'), + string: require('./util/string'), + child_process: require('./util/child_process') +}; + +let template = require('./template'); +let fs = require('fs'); +let child_process = require('child_process'); + +class Project { + constructor(projectDir, options, solution) { + let self = this; + + this._solution = solution; + this._solutionPath = options.solutionPath; + this._deps = options.deps; + this._basePath = projectDir; + this._completePath = fs.realpathSync(projectDir); + this._kakefile = this._completePath + '/Kakefile'; + let projectConfig = require(this._kakefile); + this._projectConfig = projectConfig; + + this._name = projectConfig.name; + this._version = projectConfig.version; + this._type = projectConfig.type; + this._target = projectConfig.target; + this._targetPath = fs.realpathSync(projectDir + '/' + projectConfig.targetPath); + this._buildPath = this._targetPath + `/build/linux/x64/Release/${this._name}`; + this._binPath = fs.realpathSync(`${this._targetPath}/bin/linux/x64/Release`); + this._libPath = fs.realpathSync(`${this._targetPath}/lib/linux/x64/Release`); + if ( this._target == 'executable' ) { + this._installPath = this._binPath; + } + else { + this._installPath = this._libPath; + } + + // Ignore when the directory exists. 
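+        // Note: fs.mkdirSync() is not recursive here, so the parent
+        // .../build/linux/x64/Release directory must already exist; the empty
+        // catch below swallows EEXIST (and any other error), which would then
+        // surface at the fs.realpathSync() call that follows.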
+ try { + fs.mkdirSync(this._buildPath); + } + catch (e) { + } + this._buildPath = fs.realpathSync(this._buildPath); + + this._compiler = projectConfig.compiler; + this._linker = projectConfig.linker; + this._dependencies = projectConfig.dependencies; + + if ( !this._compiler.exts ) { + this._compiler.exts = ['cpp']; + } + if ( this._target == 'dynamic_library' ) { + if ( this._compiler.cxxflags ) { + this._compiler.cxxflags.push('-fPIC'); + } + else { + this._compiler.cxxflags = ['-fPIC']; + } + } + + this._compiler.src = this._compiler.src.map(srcPath => { + return fs.realpathSync(projectDir + '/' + srcPath); + }); + + let srcDirs = this._compiler.src.filter(srcPath => fs.statSync(srcPath).isDirectory()); + this._compiler.src = this._compiler.src.filter(srcPath => fs.statSync(srcPath).isFile()); + srcDirs.forEach(srcDir => { + let srcFiles = utils.fs.findFiles({ + path: srcDir, + exts: self._compiler.exts + }); + + srcFiles.forEach(srcFile => { + this._compiler.src.push(srcFile); + }); + }); + + if ( this._compiler.useCuda ) { + if ( !this._compiler.nvccExts ) { + this._compiler.nvccExts = ['cu']; + } + + this._compiler.nvccSrc = []; + srcDirs.forEach(srcDir => { + let srcFiles = utils.fs.findFiles({ + path: srcDir, + exts: self._compiler.nvccExts + }); + + srcFiles.forEach(srcFile => { + this._compiler.nvccSrc.push(srcFile); + }); + }); + } + + this._compiler.includePaths = this._compiler.includePaths.map(includePath => { + return fs.realpathSync(projectDir + '/' + includePath); + }); + } + + generateMakefile() { + let configs = this.generateConfig(); + let configLines = Object.keys(configs).map(configKey => { + let configValue = configs[configKey]; + + if ( configValue instanceof Array ) { + configValue = configValue.join(' '); + } + + return `${configKey} := ${configValue}` + }); + + let configContent = configLines.join('\n'); + let configFilePath = `${this._buildPath}/Makefile.config`; + fs.writeFileSync(configFilePath, configContent); + + let depConfigs = this.generateDepConfigs(); + let depConfigsLines = Object.keys(depConfigs).map(configKey => { + let configValue = depConfigs[configKey]; + + if ( configValue instanceof Array ) { + configValue = configValue.join(' '); + } + + return `${configKey} := ${configValue}` + }); + + let depConfigContent = depConfigsLines.join('\n'); + let depConfigFilePath = `${this._buildPath}/../Makefile.deps`; + fs.writeFileSync(depConfigFilePath, depConfigContent); + + let makefileLines = [ + '-include Makefile.config\n', + '-include ../Makefile.deps\n' + ]; + let objectFiles = []; + let dependencyLines = []; + + this._compiler.src.forEach(srcPath => { + let {objectFileName, dependencyLine} = this.generateDependency(srcPath); + + let compilerCommand = this.generateCompilerCommand(srcPath); + dependencyLines.push(dependencyLine); + dependencyLines.push(compilerCommand + '\n\n'); + + objectFiles.push(objectFileName); + }); + + if ( this._compiler.useCuda ) { + this._compiler.nvccSrc.forEach(srcPath => { + let {objectFileName, dependencyLine} = this.generateNvccDependency(srcPath); + + let compilerCommand = this.generateNvccCompilerCommand(srcPath); + dependencyLines.push(dependencyLine); + dependencyLines.push(compilerCommand + '\n\n'); + + objectFiles.push(objectFileName); + }); + } + + makefileLines.push(`OBJS := ${objectFiles.join(' \\\n')}\n`); + + let targetName = this._name; + if ( this._target == 'dynamic_library' ) { + targetName = `lib${targetName}.so`; + } + this._targetName = targetName; + makefileLines.push(`all: ${targetName}\n`); + 
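+        // The generated Makefile ends up with three targets: `all` links the
+        // binary or library, `install` copies it into the bin/ (executable) or
+        // lib/ (library) install path, and `clean` removes it along with *.o files.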
makefileLines.push(this.generateLinkLine(targetName) + '\n'); + + let binPath = utils.fs.relativePath(this._installPath, this._buildPath); + makefileLines.push(`install: ${binPath}/${targetName}\n`); + makefileLines.push(`${binPath}/${targetName}: ${targetName}`); + makefileLines.push(`\tcp ${targetName} ${binPath}\n`); + + makefileLines.push(`clean:`); + makefileLines.push(`\trm -f ${targetName}`); + makefileLines.push(`\trm -f *.o\n`); + + let makefileContent = `${makefileLines.join('\n')}\n${dependencyLines.join('')}`; + let makefilePath = `${this._buildPath}/Makefile`; + + fs.writeFileSync(makefilePath, makefileContent); + } + + generateLinkLine(targetName) { + let linkLines = [ + `${targetName}: $(OBJS)`, + this.generateLinkerCommand(targetName) + ]; + + return linkLines.join('\n'); + } + + generateLinkerCommand(targetName) { + let command = `\t$(LD) $(OBJS)`; + if ( this._target == 'dynamic_library' ) { + command += ' -shared'; + } + + let libraryPaths = []; + let inputs = []; + + if ( this._dependencies ) { + Object.keys(this._dependencies).forEach(depName => { + let dep = this._deps[depName]; + + if ( !dep ) { + let depProject = this._solution.getProject(depName); + let relLibPath = utils.fs.relativePath(depProject._buildPath, this._buildPath); + + libraryPaths.push(`-L"${relLibPath}"`); + inputs.push(`-l${depProject._name}`); + + return; + } + + if ( dep.scope != 'system' ) { + libraryPaths.push(`-L"$(${depName.toUpperCase()}_LIB)"`); + } + inputs = inputs.concat(dep.inputs.map(input => `-l${input}`)); + }); + } + + let ldflags = ''; + if ( this._linker.ldflags ) { + ldflags = this._linker.ldflags.join(' '); + } + + return `${command} -o ${targetName} ${libraryPaths.join(' ')} ${inputs.join(' ')} ${ldflags}`; + } + + generateConfig() { + let config = { + CXX: this._compiler.cxx, + NVCC: this._compiler.nvcc, + LD: this._linker.ld, + LDFLAGS: this._linker.ldflags, + OS: 'linux', + BITS: '64', + CPU: 'x64', + PLATFORM: '$(OS)/$(CPU)' + }; + + return config; + } + + generateDepConfigs() { + let configs = {}; + + Object.keys(this._deps).forEach(depName => { + let dep = this._deps[depName]; + + if ( dep.scope == 'system' ) { + return; + } + + let includeKey = `${depName}_INCLUDE`.toUpperCase(); + let libKey = `${depName}_LIB`.toUpperCase(); + + let includeValues = dep.include.map(include => { + let includeValue = `${dep.basePath}/${include}`; + if ( !(includeValue.startsWith('/')) ) { + includeValue = utils.fs.relativePath(`${this._solutionPath}/${includeValue}`, this._buildPath); + } + + return `-I"${includeValue}"`; + }); + + let libValue = `${dep.basePath}/${dep.lib}`; + + if ( !(libValue.startsWith('/')) ) { + libValue = utils.fs.relativePath(`${this._solutionPath}/${libValue}`, this._buildPath); + } + + configs[includeKey] = includeValues; + configs[libKey] = libValue; + }); + + return configs; + } + + generateDependency(srcPath) { + console.log(`Resolve dependency: ${srcPath}`); + let cwd = this._buildPath; + let srcRelativePath = utils.fs.relativePath(srcPath, this._buildPath); + let commandArguments = [srcRelativePath, '-c','-MM']; + commandArguments = this.fillCompilerCommandArgs(commandArguments, { + useIncludeVariable: false + }); + + let resolveResult = child_process.execFileSync('g++', commandArguments, { + cwd: cwd, + encoding: 'utf8' + }); + + let fileNameEndIndex = resolveResult.indexOf(':'); + let objectFileName = srcRelativePath; + objectFileName = utils.string.replaceAll(objectFileName, '_', '__'); + objectFileName = utils.string.replaceAll(objectFileName, '-', '--'); 
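+        // With '_' and '-' escaped above by doubling, '.' and '/' are mapped to
+        // '_' and '-' below, so each source path flattens to a unique .o file name.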
+ objectFileName = utils.string.replaceAll(objectFileName, '.', '_'); + objectFileName = utils.string.replaceAll(objectFileName, '/', '-'); + objectFileName += '.o'; + + let dependencyLine = objectFileName + + resolveResult + .slice(fileNameEndIndex) + .split('\n') + .filter(line => !(line.trim().startsWith('/'))) + .join('\n'); + + if ( dependencyLine.trim().endsWith('\\') ) { + dependencyLine = dependencyLine.trim(); + dependencyLine = dependencyLine.slice(0, dependencyLine.length - 1) + '\n'; + } + + return { + objectFileName: objectFileName, + dependencyLine: dependencyLine + }; + } + + generateNvccDependency(srcPath) { + console.log(`Resolve dependency: ${srcPath}`); + let cwd = this._buildPath; + let srcRelativePath = utils.fs.relativePath(srcPath, this._buildPath); + let commandArguments = [srcRelativePath, '-c', '--verbose', '--dryrun']; + commandArguments = this.fillNvidiaCompilerCommandArgs(commandArguments, { + useIncludeVariable: false + }); + + let nvccResult = utils.child_process.execFile('nvcc', commandArguments, { + cwd: cwd, + encoding: 'utf8' + }); + + let cbinCommand = nvccResult.stderr.split('\n').find(line => { + return line.startsWith('#$ gcc'); + }); + + let redirectIndex = cbinCommand.indexOf('>'); + cbinCommand = cbinCommand.slice(3, redirectIndex) + ' -MM'; + + let resolveResult = child_process.execSync(cbinCommand, { + cwd: cwd, + encoding: 'utf8' + }); + + let fileNameEndIndex = resolveResult.indexOf(':'); + let objectFileName = srcRelativePath; + objectFileName = utils.string.replaceAll(objectFileName, '_', '__'); + objectFileName = utils.string.replaceAll(objectFileName, '-', '--'); + objectFileName = utils.string.replaceAll(objectFileName, '.', '_'); + objectFileName = utils.string.replaceAll(objectFileName, '/', '-'); + objectFileName += '.o'; + + let dependencyLine = objectFileName + + resolveResult + .slice(fileNameEndIndex) + .split('\n') + .filter(line => !(line.trim().startsWith('/'))) + .join('\n'); + + if ( dependencyLine.trim().endsWith('\\') ) { + dependencyLine = dependencyLine.trim(); + dependencyLine = dependencyLine.slice(0, dependencyLine.length - 1) + '\n'; + } + + return { + objectFileName: objectFileName, + dependencyLine: dependencyLine + }; + } + + generateCompilerCommand(srcPath) { + let srcRelativePath = utils.fs.relativePath(srcPath, this._buildPath); + let commandArguments = ['$(CXX)', srcRelativePath, '-c', '-o', '$@']; + commandArguments = this.fillCompilerCommandArgs(commandArguments, { + useIncludeVariable: true + }); + + return '\t' + commandArguments.join(' '); + } + + generateNvccCompilerCommand(srcPath) { + let srcRelativePath = utils.fs.relativePath(srcPath, this._buildPath); + let commandArguments = ['$(NVCC)', srcRelativePath, '-c', '-o', '$@']; + commandArguments = this.fillNvidiaCompilerCommandArgs(commandArguments, { + useIncludeVariable: true + }); + + return '\t' + commandArguments.join(' '); + } + + fillCompilerCommandArgs(commandArguments, options) { + this._compiler.includePaths.forEach(includePath => { + let incRelPath = utils.fs.relativePath(includePath, this._buildPath); + commandArguments.push(`-I${incRelPath}`); + }); + + if ( this._dependencies ) { + Object.keys(this._dependencies).forEach(depName => { + let dep = this._deps[depName]; + + if ( !dep ) { + let depProject = this._solution.getProject(depName); + depProject._compiler.includePaths.forEach(includePath => { + let incRelPath = utils.fs.relativePath(includePath, this._buildPath); + commandArguments.push(`-I${incRelPath}`); + }); + + return; + } + + if ( 
dep.scope == 'custom' ) { + if ( options.useIncludeVariable ) { + commandArguments.push(`$(${depName.toUpperCase()}_INCLUDE)`); + } + else { + dep.include.forEach(include => { + let includePath = `${dep.basePath}/${include}`; + if ( !includePath.startsWith('/') ) { + includePath = `${this._solutionPath}/${includePath}`; + let incRelPath = utils.fs.relativePath(includePath, this._buildPath); + commandArguments.push(`-I${incRelPath}`); + } + else { + commandArguments.push(`-I${includePath}`); + } + }); + } + } + }); + } + + if ( this._compiler.std ) { + commandArguments.push(`-std=${this._compiler.std}`); + } + + if ( this._compiler.optimize ) { + commandArguments.push(`-O${this._compiler.optimize}`); + } + else { + commandArguments.push(`-O2`); + } + + commandArguments = commandArguments.concat(this._compiler.cxxflags); + + if ( this._compiler.defines ) { + commandArguments = commandArguments.concat(this._compiler.defines.map(definition => { + return `-D${definition}`; + })); + } + + return commandArguments; + } + + fillNvidiaCompilerCommandArgs(commandArguments, options) { + this._compiler.includePaths.forEach(includePath => { + let incRelPath = utils.fs.relativePath(includePath, this._buildPath); + commandArguments.push(`-I${incRelPath}`); + }); + + if ( this._dependencies ) { + Object.keys(this._dependencies).forEach(depName => { + let dep = this._deps[depName]; + + if ( !dep ) { + let depProject = this._solution.getProject(depName); + depProject._compiler.includePaths.forEach(includePath => { + let incRelPath = utils.fs.relativePath(includePath, this._buildPath); + commandArguments.push(`-I${incRelPath}`); + }); + + return; + } + + if ( dep.scope == 'custom' ) { + if ( options.useIncludeVariable ) { + commandArguments.push(`$(${depName.toUpperCase()}_INCLUDE)`); + } + else { + dep.include.forEach(include => { + let includePath = `${dep.basePath}/${include}`; + if ( !includePath.startsWith('/') ) { + includePath = `${this._solutionPath}/${includePath}`; + let incRelPath = utils.fs.relativePath(includePath, this._buildPath); + commandArguments.push(`-I${incRelPath}`); + } + else { + commandArguments.push(`-I${includePath}`); + } + }); + } + } + }); + } + + if ( this._compiler.std ) { + commandArguments.push(`-std=${this._compiler.std}`); + } + this._compiler.cxxflags.forEach(cxxflag => { + commandArguments.push('-Xcompiler'); + commandArguments.push(cxxflag); + }); + + if ( this._compiler.nvccflags ) { + this._compiler.nvccflags.forEach(nvccflag => { + commandArguments.push(nvccflag); + }); + } + + if ( this._compiler.defines ) { + commandArguments = commandArguments.concat(this._compiler.defines.map(definition => { + return `-D${definition}`; + })); + } + + return commandArguments; + } + +} + +Project.CreateNewProject = function(options) { + let projectName = options.name; + + let projectTemplateContent = template.readTemplateFile('project/CppProject.kake'); + let kakefileContent = projectTemplateContent.replace('CppProject', projectName); + + try { + fs.mkdirSync(projectName); + console.log(`Created directory ${projectName}`); + } + catch (e) { + console.log(`The project dir ${projectName} has been existed. 
Skip create directory.`); + } + + let kakefilePath = projectName + '/Kakefile'; + fs.writeFileSync(kakefilePath, kakefileContent); + console.log(`Generating ${kakefilePath} ...`); + console.log('Bootstrap finished.'); +} + +module.exports = Project; diff --git a/deps/kake/lib/solution.js b/deps/kake/lib/solution.js new file mode 100644 index 0000000..9b05fc4 --- /dev/null +++ b/deps/kake/lib/solution.js @@ -0,0 +1,122 @@ +'use strict'; + +let fs = require('fs'); +let utils = { + string: require('./util/string') +}; + +let Project = require('./project'); +let template = require('./template'); + +class Solution { + constructor(solutionDir) { + this._basePath = solutionDir; + this._completePath = fs.realpathSync(solutionDir); + this._kakefile = this._completePath + '/Kakefile'; + this._deps = require(this._completePath + '/deps.kake'); + this.processDeps(); + let solutionConfig = require(this._kakefile); + + this._name = solutionConfig.name; + this._version = solutionConfig.version; + this._type = solutionConfig.type; + this._projects = solutionConfig.projects.map(project => { + return new Project(`${this._basePath}/${project}`, { + solutionPath: this._completePath, + deps: this._deps + }, this); + }); + } + + getProject(projectName) { + return this._projects.find(project => { + return project._name == projectName; + }); + } + + processDeps() { + Object.keys(this._deps).forEach(depName => { + let dep = this._deps[depName]; + + if ( !(dep.scope) ) { + dep.scope = 'system'; + } + + if ( !(dep.include) ) { + dep.include = 'include'; + } + + if ( !(dep.lib) ) { + dep.lib = 'lib'; + } + + if ( typeof(dep.include) == 'string' ) { + dep.include = [ dep.include ]; + } + dep.include = dep.include.map(include => { + return utils.string.replaceAll(include, '${Platform}', 'linux/x64'); + }); + dep.lib = utils.string.replaceAll(dep.lib, '${Platform}', 'linux/x64'); + }); + } + + addProject(options) { + let project = Project.CreateNewProject(options); + } + + generateMakefile() { + this._projects.forEach(project => { + project.generateMakefile(); + }); + + let makefileLines = ['all:']; + this._projects.forEach(project => { + makefileLines.push(`\tcd ${project._name};make`); + }); + + makefileLines.push('clean:'); + this._projects.forEach(project => { + makefileLines.push(`\tcd ${project._name};make clean`); + }); + + makefileLines.push('install:'); + this._projects.forEach(project => { + makefileLines.push(`\tcd ${project._name};make install`); + }); + + let makefileContent = makefileLines.join('\n'); + let makefilePath = `${this._projects[0]._targetPath}/build/linux/x64/Release/Makefile`; + console.log(makefilePath); + fs.writeFileSync(makefilePath, makefileContent); + } +} + +Solution.CreateNewSolution = function(options) { + let solutionName = options.name; + + let kakefilePath = solutionName + '/Kakefile'; + console.log(`Generating ${kakefilePath} ...`); + + let solutionTemplateContent = template.readTemplateFile('solution/SolutionSample.kake'); + let kakefileContent = solutionTemplateContent.replace('SampleSolution', solutionName); + + try { + fs.mkdirSync(solutionName); + console.log(`Created directory ${solutionName}`); + } + catch (e) { + console.log(`The solution dir ${solutionName} has been existed. 
Skip create directory.`); + } + + fs.writeFileSync(kakefilePath, kakefileContent); + + let depsPath = solutionName + '/deps.kake'; + console.log(`Generating ${depsPath} ...`); + + let depsContent = template.readTemplateFile('solution/deps.kake'); + fs.writeFileSync(depsPath, depsContent); + + console.log('Bootstrap finished.'); +} + +module.exports = Solution; diff --git a/deps/kake/lib/template.js b/deps/kake/lib/template.js new file mode 100644 index 0000000..0507fb2 --- /dev/null +++ b/deps/kake/lib/template.js @@ -0,0 +1,14 @@ +'use strict'; + +let fs = require('fs'); + +function readTemplateFile(templateName) { + let templateDir = __dirname + '/../templates/'; + let templatePath = templateDir + templateName; + + return fs.readFileSync(templatePath, 'utf8'); +} + +module.exports = { + readTemplateFile +}; diff --git a/deps/kake/lib/util/child_process.js b/deps/kake/lib/util/child_process.js new file mode 100644 index 0000000..bf10ebd --- /dev/null +++ b/deps/kake/lib/util/child_process.js @@ -0,0 +1,27 @@ +'use strict'; + +let Fiber = require('fibers'); +let child_process = require('child_process'); + +function execFile() { + let args = Array.from(arguments); + let result = {}; + let fiber = Fiber.current; + + args.push((error, stdout, stderr) => { + result.error = error; + result.stdout = stdout; + result.stderr = stderr; + + fiber.run(); + }); + + child_process.execFile.apply(null, args); + Fiber.yield(); + + return result; +} + +module.exports = { + execFile +}; diff --git a/deps/kake/lib/util/cpp_parser.js b/deps/kake/lib/util/cpp_parser.js new file mode 100644 index 0000000..4290d22 --- /dev/null +++ b/deps/kake/lib/util/cpp_parser.js @@ -0,0 +1,203 @@ +'use strict'; + +let fs = require('fs'); + +class CppParser { + parseFile(filePath) { + let fileContent = fs.readFileSync(filePath, 'utf8'); + + let fileMode = this.getFileMode(fileContent); + let lines = fileContent.split(fileMode.lineSeperator); + + this.parseLines(lines); + } + + getFileMode(fileContent) { + if ( fileContent.indexOf('\r\n') ) { + return CppParser.FileMode.DosFile + } + + return CppParser.FileMode.UnixFile; + } + + parseLines(lines) { + let lineCount = lines.length; + + for ( let lineIndex = 0; lineIndex != lineCount; ++ lineIndex ) { + let line = lines[lineIndex]; + + let lineLength = line.length; + console.log(`${lineIndex}: ${line.length}`); + for ( let charIndex = 0; charIndex != lineLength; ++ charIndex ) { + let character = line[charIndex]; + } + } + } +} + +CppParser.FileMode = { + UnixFile: { + type: 'UnixFile', + lineSeperator: '\n' + }, + DosFile: { + type: 'DosFile', + lineSeperator: '\r\n' + } +}; + +CppParser.lexSyntax = [ + { + name: 'string_concat', + start: '##', + pattern: /##/ + }, + { + name: 'make_character', + start: '#@', + pattern: /#@/ + }, + { + name: 'define', + start: '#define', + pattern: /\b#define\b/ + }, + { + name: 'if', + start: '#if', + pattern: /\b#if\b/ + }, + { + name: 'ifdef', + start: '#ifdef', + pattern: /\b#ifdef\b/ + }, + { + name: 'ifndef', + start: '#ifndef', + pattern: /\b#ifndef\b/ + }, + { + name: 'else', + start: '#else', + pattern: /\b#else\b/ + }, + { + name: 'endif', + start: '#endif', + pattern: /\b#endif\b/ + }, + { + name: 'error', + start: '#error', + pattern: /\b#error\b/ + }, + { + name: 'make_string', + start: '#', + pattern: /#/ + }, + { + name: 'string', + start: '"', + pattern: /"[^"]"/ + }, + { + name: 'cpp_comment', + start: '//', + pattern: /\/\/.*$/ + }, + { + name: 'c_comment_start', + start: '/*', + pattern: /\/\*/ + }, + { + name: 
'c_comment_end', + start: '*/', + pattern: /\*\// + }, + { + name: 'bracket_open', + start: '(', + pattern: /\(/ + }, + { + name: 'bracket_close', + start: ')', + pattern: /\)/ + }, + { + name: 'comma', + start: ',', + pattern: /,/ + }, + { + name: 'plus', + start: '+', + pattern: /\+/ + }, + { + name: 'minus', + start: '-', + pattern: /-/ + }, + { + name: 'multiply', + start: '*', + pattern: /\*/ + }, + { + name: 'devide', + start: '/', + pattern: /\// + }, + { + name: 'mod', + start: '%', + pattern: /%/ + }, + { + name: 'logic_and', + start: '&&', + pattern: /&&/ + }, + { + name: 'logic_or', + start: '||', + pattern: /||/ + }, + { + name: 'logic_not', + start: '!', + pattern: /!/ + }, + { + name: 'bit_and', + start: '&', + pattern: /&/ + }, + { + name: 'bit_or', + start: '|', + pattern: /|/ + }, + { + name: 'bit_not', + start: '~', + pattern: /~/ + }, + { + name: 'bit_xor', + start: '^', + pattern: /\^/ + }, + { + name: 'word', + start: /[\w]/, + pattern: /[\w][\w\d]*/ + } +]; + +let cppParser = new CppParser(); +cppParser.parseFile('/home/kinuxroot/Projects/authen/AuthenCoreSDK/src/common/caffe-fast-rcnn/caffe/layer.cpp'); diff --git a/deps/kake/lib/util/fs.js b/deps/kake/lib/util/fs.js new file mode 100644 index 0000000..c61cc90 --- /dev/null +++ b/deps/kake/lib/util/fs.js @@ -0,0 +1,122 @@ +'use strict'; + +let fs = require('fs'); + +function relativePath(originPath, basePath) { + let absoluteOriginPath = fs.realpathSync(originPath); + let absoluteBasePath = fs.realpathSync(basePath); + + absoluteOriginPath = replaceAll(absoluteOriginPath, '\\', '/'); + absoluteBasePath = replaceAll(absoluteBasePath, '\\', '/'); + + let originPathParts = absoluteOriginPath.split('/'); + let originFileName = originPathParts[originPathParts.length - 1]; + originPathParts = originPathParts.slice(0, originPathParts.length - 1); + let basePathParts = absoluteBasePath.split('/'); + + let commonPrefixIndex = commonPathPrefix(originPathParts, basePathParts); + let backTimes = basePathParts.length - commonPrefixIndex; + let backPathParts = makeBackPathParts(backTimes); + let remainingPathParts = originPathParts.slice(commonPrefixIndex); + + let finalPathParts = backPathParts.concat(remainingPathParts); + finalPathParts.push(originFileName); + + let finalPath = finalPathParts.join('/'); + + return finalPath; +} + +function findFiles(options) { + let foundFiles = []; + + let rootDir = fs.realpathSync(options.path); + let exts = options.exts; + + let subItems = fs.readdirSync(rootDir); + let subDirs = []; + + subItems.forEach(subItem => { + let subItemPath = rootDir + '/' + subItem; + + let itemStatus = fs.statSync(subItemPath); + if ( itemStatus.isFile() && endsWithExts(subItem, exts) ) { + foundFiles.push(subItemPath); + } + else if ( itemStatus.isDirectory() ) { + subDirs.push(subItemPath); + } + }); + + subDirs.forEach(subDir => { + foundFiles = foundFiles.concat(findFiles({ + path: subDir, + exts: exts + })); + }); + + return foundFiles; +} + +function endsWithExts(fileName, exts) { + exts = exts.map(ext => `.${ext}`); + return exts.reduce((prev, ext) => { + if ( !prev ) { + return fileName.endsWith(ext); + } + + return true; + }, false); +} + +function commonPathPrefix(pathParts1, pathParts2) { + let minLength = min(pathParts1.length, pathParts2.length); + + for ( let partIndex = 0; partIndex < minLength; partIndex ++ ) { + if ( pathParts1[partIndex] != pathParts2[partIndex] ) { + return partIndex; + } + } + + return minLength; +} + +function makeBackPathParts(backTimes) { + let parts = []; + for ( let 
backTime = 0; backTime < backTimes; backTime ++ ) { + parts.push('..'); + } + + return parts; +} + +function replaceAll(str, cond, rep) { + let origin = str; + while ( true ) { + let result = origin.replace(cond, rep); + if ( result == origin ) { + return result; + } + + origin = result; + } +} + +function min(a, b) { + if ( a < b ) { + return a; + } + + return b; +} + +module.exports = { + relativePath, + findFiles +}; + +//console.log( +//findFiles({ +// path: '../src/detector/caffe-fast-rcnn', +// exts: ['cpp', 'c'] +//})); diff --git a/deps/kake/lib/util/string.js b/deps/kake/lib/util/string.js new file mode 100644 index 0000000..3639577 --- /dev/null +++ b/deps/kake/lib/util/string.js @@ -0,0 +1,9 @@ +'use strict'; + +function replaceAll(str, cond, rep) { + return str.split(cond).join(rep); +} + +module.exports = { + replaceAll +}; diff --git a/deps/kake/main.js b/deps/kake/main.js new file mode 100644 index 0000000..d08204c --- /dev/null +++ b/deps/kake/main.js @@ -0,0 +1,64 @@ +'use strict' + +let Fiber = require('fibers'); +let Solution = require('./lib/solution'); + +function main() { + if ( process.argv.length < 3 ) { + console.log('Usage: kake '); + process.exit(-1); + } + + let action = process.argv[2]; + + if ( action == 'bootstrap' ) { + if ( process.argv.length < 5 ) { + console.log('Usage: kake bootstrap '); + process.exit(-1); + } + + let projectType = process.argv[3]; + let solutionName = process.argv[4]; + + if ( projectType != 'solution' ) { + console.log('Kake only support bootstrap solution now'); + process.exit(-1); + } + + let solution = Solution.CreateNewSolution({ + name: solutionName + }); + } + else if ( action == 'generate' ) { + let solutionDir = '.'; + if ( process.argv.length > 3 ) { + solutionDir = process.argv[3]; + } + + let solution = new Solution(solutionDir); + + solution.generateMakefile(); + } + else if ( action == 'execute' ) { + let solution = new Solution('.'); + + let command = process.argv[3]; + if ( command == 'add' ) { + let itemType = process.argv[4]; + if ( itemType == 'project' ) { + let projectName = process.argv[5]; + solution.addProject({ + name: projectName + }); + } + } + } + else if ( action == 'build' ) { + let solution = new Solution('.'); + } + + + process.exit(); +} + +Fiber(main).run(); diff --git a/deps/kake/package.json b/deps/kake/package.json new file mode 100644 index 0000000..df1b42b --- /dev/null +++ b/deps/kake/package.json @@ -0,0 +1,11 @@ +{ + "name": "kake", + "description": "Makefile generator", + "version": "0.0.1", + "private": true, + "dependencies": { + "fibers": "latest", + "pixl-xml": "latest", + "uuid": "latest" + } +} diff --git a/deps/kake/sample/asmexample/Kakefile b/deps/kake/sample/asmexample/Kakefile deleted file mode 100755 index e709b97..0000000 --- a/deps/kake/sample/asmexample/Kakefile +++ /dev/null @@ -1,12 +0,0 @@ -project: - name: example - version: 0.0.1 - type: cpp - -make: - configuration: - compiler: - asm: - flags: "-f elf64" - linker: - ld: gcc diff --git a/deps/kake/sample/asmexample/src/main/main.asm b/deps/kake/sample/asmexample/src/main/main.asm deleted file mode 100755 index 7e04bad..0000000 --- a/deps/kake/sample/asmexample/src/main/main.asm +++ /dev/null @@ -1,5 +0,0 @@ -global main - -[section .text] -main: - mov eax, 0 diff --git a/deps/kake/sample/cppexample/Kakefile b/deps/kake/sample/cppexample/Kakefile deleted file mode 100755 index 96a2f0e..0000000 --- a/deps/kake/sample/cppexample/Kakefile +++ /dev/null @@ -1,14 +0,0 @@ -project: - name: example - version: 0.0.1 - type: cpp - 
-make: - configuration: - compiler: - c: - include_paths: [ "/home/stormdev/Application/boost_1_57_0" ] - linker: - ld: g++ - library_paths: [ "/home/stormdev/Application/boost_1_57_0/stage/lib" ] - libraries: [ "boost_regex" ] diff --git a/deps/kake/sample/cppexample/src/main/main.cpp b/deps/kake/sample/cppexample/src/main/main.cpp deleted file mode 100755 index 9733734..0000000 --- a/deps/kake/sample/cppexample/src/main/main.cpp +++ /dev/null @@ -1,18 +0,0 @@ -#include -#include -#include - -int main(){ - std::string line; - boost::regex pat("^Subject: (Re: |Aw: )*(.*)"); - - while ( std::cin ) { - std::getline(std::cin, line); - boost::smatch matches; - if ( boost::regex_match(line, matches, pat) ) { - std::cout << matches[2] << std::endl; - } - } - - return 0; -} diff --git a/deps/kake/sample/gmakeexample/Kakefile b/deps/kake/sample/gmakeexample/Kakefile deleted file mode 100755 index 4817a5c..0000000 --- a/deps/kake/sample/gmakeexample/Kakefile +++ /dev/null @@ -1,26 +0,0 @@ -project: - name: example - version: 0.0.1 - type: gmake - -make: - rules: - all: - dependencies: - - example - example: - dependencies: - - main.o - - hello.o - actions: - - { command: gcc, args: [ "-o", "example", "main.o", "hello.o" ] } - main.o: - dependencies: - - main.c - actions: - - { command: gcc, args: [ "-c", "-o", "main.o", "main.c"] } - hello.o: - dependencies: - - hello.c - actions: - - { command: gcc, args: [ "-c", "-o", "hello.o", "hello.c"] } diff --git a/deps/kake/sample/testc/Kakefile b/deps/kake/sample/testc/Kakefile deleted file mode 100755 index eae7327..0000000 --- a/deps/kake/sample/testc/Kakefile +++ /dev/null @@ -1,11 +0,0 @@ -project: - name: testc - version: 0.0.1 - type: c - -make: - configuration: - compiler: - cflags: -std=c99 - linker: - autolink: true diff --git a/deps/kake/sample/testc/include/main/hello.h b/deps/kake/sample/testc/include/main/hello.h deleted file mode 100755 index 9bb085a..0000000 --- a/deps/kake/sample/testc/include/main/hello.h +++ /dev/null @@ -1 +0,0 @@ -void hello(); diff --git a/deps/kake/sample/testc/src/main/hello/hello.c b/deps/kake/sample/testc/src/main/hello/hello.c deleted file mode 100755 index a75545f..0000000 --- a/deps/kake/sample/testc/src/main/hello/hello.c +++ /dev/null @@ -1,5 +0,0 @@ -#include - -void hello(){ - printf("hello,world\n"); -} diff --git a/deps/kake/sample/testc/src/main/main.c b/deps/kake/sample/testc/src/main/main.c deleted file mode 100755 index 42b96db..0000000 --- a/deps/kake/sample/testc/src/main/main.c +++ /dev/null @@ -1,17 +0,0 @@ -#include -#include -#include - -int main(int argc, char** argv){ - hello(); - FILE* f = fopen(argv[1], "r"); - if ( f ) { - char str[80]; - memset(str, '\0', 80); - - fgets(str, 80, f); - puts(str); - } - - return 0; -} diff --git a/deps/kake/sample/testcpp/Kakefile b/deps/kake/sample/testcpp/Kakefile deleted file mode 100755 index 20dbcae..0000000 --- a/deps/kake/sample/testcpp/Kakefile +++ /dev/null @@ -1,4 +0,0 @@ -project: - name: testcpp - version: 0.0.1 - type: cpp diff --git a/deps/kake/sample/testcpp/include/main/hello/hello.h b/deps/kake/sample/testcpp/include/main/hello/hello.h deleted file mode 100755 index 895d897..0000000 --- a/deps/kake/sample/testcpp/include/main/hello/hello.h +++ /dev/null @@ -1,11 +0,0 @@ -/** @file: hello.h - * @brief: Hello header file - * @date: 2015年03月06日 18时27分21秒 - * @author: kinuxroot, kinuxroot@163.com - * @group: LessOS Development Group - * @version: 1.0 - * @note: - * - */ - -void hello(); diff --git 
a/deps/kake/sample/testcpp/src/main/hello/hello.cpp b/deps/kake/sample/testcpp/src/main/hello/hello.cpp deleted file mode 100755 index dddbc87..0000000 --- a/deps/kake/sample/testcpp/src/main/hello/hello.cpp +++ /dev/null @@ -1,15 +0,0 @@ -/** @file: hello.cpp - * @brief: Hello implementation - * @date: 2015年03月06日 18时26分49秒 - * @author: kinuxroot, kinuxroot@163.com - * @group: LessOS Development Group - * @version: 1.0 - * @note: - * - */ - -#include - -void hello(){ - std::cout << "hello" << std::endl; -} diff --git a/deps/kake/sample/testcpp/src/main/main.cpp b/deps/kake/sample/testcpp/src/main/main.cpp deleted file mode 100755 index 44a78f1..0000000 --- a/deps/kake/sample/testcpp/src/main/main.cpp +++ /dev/null @@ -1,21 +0,0 @@ -/** @file: main.cpp - * @brief: Main cpp file - * @date: 2015年03月06日 18时20分50秒 - * @author: kinuxroot, kinuxroot@163.com - * @group: LessOS Development Group - * @version: 1.0 - * @note: - * - */ - -#include "hello/hello.h" -#include - -int main(int argc, char** argv){ - hello(); - if ( argc > 1 ) { - std::cout << argv[1] << std::endl; - } - - return 0; -} diff --git a/deps/kake/sample/testsln/Kakefile b/deps/kake/sample/testsln/Kakefile deleted file mode 100755 index 1f3a17b..0000000 --- a/deps/kake/sample/testsln/Kakefile +++ /dev/null @@ -1,7 +0,0 @@ -project: - name: testsln - version: 0.0.1 - type: solution - -run: - default: testld diff --git a/deps/kake/sample/testsln/project/testc/Kakefile b/deps/kake/sample/testsln/project/testc/Kakefile deleted file mode 100755 index ed3d023..0000000 --- a/deps/kake/sample/testsln/project/testc/Kakefile +++ /dev/null @@ -1,11 +0,0 @@ -project: - name: testc - version: 0.0.1 - type: cpp - -make: - configuration: - compiler: - cflags: -std=c99 - linker: - autolink: false diff --git a/deps/kake/sample/testsln/project/testc/include/main/hello.h b/deps/kake/sample/testsln/project/testc/include/main/hello.h deleted file mode 100755 index 9bb085a..0000000 --- a/deps/kake/sample/testsln/project/testc/include/main/hello.h +++ /dev/null @@ -1 +0,0 @@ -void hello(); diff --git a/deps/kake/sample/testsln/project/testc/src/main/hello/hello.c b/deps/kake/sample/testsln/project/testc/src/main/hello/hello.c deleted file mode 100755 index a75545f..0000000 --- a/deps/kake/sample/testsln/project/testc/src/main/hello/hello.c +++ /dev/null @@ -1,5 +0,0 @@ -#include - -void hello(){ - printf("hello,world\n"); -} diff --git a/deps/kake/sample/testsln/project/testc/src/main/main.c b/deps/kake/sample/testsln/project/testc/src/main/main.c deleted file mode 100755 index e008c72..0000000 --- a/deps/kake/sample/testsln/project/testc/src/main/main.c +++ /dev/null @@ -1,15 +0,0 @@ -#include -#include -#include - -int main(){ - hello(); - FILE* f = fopen("1.txt", "r"); - char str[80]; - memset(str, '\0', 80); - - fgets(str, 80, f); - puts(str); - - return 0; -} diff --git a/deps/kake/sample/testsln/project/testld/Kakefile b/deps/kake/sample/testsln/project/testld/Kakefile deleted file mode 100755 index f99b97a..0000000 --- a/deps/kake/sample/testsln/project/testld/Kakefile +++ /dev/null @@ -1,14 +0,0 @@ -project: - name: testld - version: 0.0.1 - type: ld - -dependencies: - - { name: testc, source: { type: solution } } - -make: - configuration: - target: - type: executable - linker: - include: [ testc ] diff --git a/deps/kake/src/backends/__init.py__ b/deps/kake/src/backends/__init.py__ deleted file mode 100755 index e69de29..0000000 diff --git a/deps/kake/src/backends/gmake/__init.py__ b/deps/kake/src/backends/gmake/__init.py__ deleted file mode 
100755 index e69de29..0000000 diff --git a/deps/kake/src/backends/gmake/engine.py b/deps/kake/src/backends/gmake/engine.py deleted file mode 100755 index 73943a9..0000000 --- a/deps/kake/src/backends/gmake/engine.py +++ /dev/null @@ -1,341 +0,0 @@ -from util import FileUtil, Path, PathUtil -import os - -class Document(object): - def __init__(self, documentPath): - self.path = documentPath - self.rules = [] - self.subDocuments = [] - pass - - def __str__(self): - return "GNU Makefile Document Object" - - def getPath(self): - return self.path - - def addRule(self, rule): - self.rules.append(rule) - - def addSubDocument(self, subDocument): - self.subDocuments.append(subDocument) - - def writeToFile(self): - makefileContent = self.toText() - makefile = FileUtil.openFile(self.path) - makefile.writelines(makefileContent) - - for subDocument in self.subDocuments: - subDocument.writeToFile() - - makefile.close() - - def toText(self): - makefileContent = [] - for rule in self.rules: - makefileContent.append(rule.toText()) - makefileContent.append('') - - for subDocument in self.subDocuments: - includeInstruction = "-include %(path)s" % { "path": subDocument.getPath() } - makefileContent.append(includeInstruction) - - return '\n'.join(makefileContent) - -class Rule(object): - def __init__(self): - self.prebuildActions = [] - self.buildActions = [] - self.postbuildActions = [] - self.target = "" - pass - - def __str__(self): - return "GNU Makefile Rule Object" - - def addPrebuildAction(self, action): - self.prebuildActions.append(action) - - def addBuildAction(self, action): - self.buildActions.append(action) - - def addPostbuildAction(self, action): - self.postbuildActions.append(action) - - def setTarget(self, target): - self.target = target - - def getTarget(self): - return self.target - - def toText(self): - textContent = [] - textContent.append(self.getTargetLine()) - - for prebuildAction in self.prebuildActions: - textContent.append('\t' +prebuildAction.toText()) - - for buildAction in self.buildActions: - textContent.append('\t' + buildAction.toText()) - - for postbuildAction in self.postbuildActions: - textContent.append('\t' + postbuildAction.toText()) - - return '\n'.join(textContent) - - def getTargetLine(self): - raise NotImplementedError() - -class Action(object): - def __init__(self): - pass - - def __str__(self): - return "GNU Makefile Action Object" - - def toText(self): - raise NotImplementedError() - -class SimpleRule(Rule): - def __init__(self, target, prerequisites): - Rule.__init__(self) - self.setTarget(target) - self.setPrerequisites(prerequisites) - pass - - def setPrerequisites(self, prerequisites): - self.prerequisites = prerequisites - - def getPrerequisites(self): - return self.prerequisites - - def getTargetLine(self): - shellStrings = [] - for prerequisite in self.prerequisites: - shellStrings.append(prerequisite.shellString()) - - return '%(target)s: %(prerequisites)s' % { - 'target': self.getTarget(), - 'prerequisites': ' '.join(shellStrings) - } - -class GCCCompileRule(Rule): - def __init__(self, sourceFilePath, includePaths, sourceDirPath, objectDirPath, makeConf): - Rule.__init__(self) - - relPath = sourceFilePath.getRelevantPath(sourceDirPath) - dirName = relPath.getDirName() - baseName = relPath.getBaseName() - - objectSubDirPath = objectDirPath.join(dirName) - objectFilePath = objectSubDirPath.join(PathUtil.getPrefix(baseName)) - if sourceFilePath.getExt() not in makeConf['asm_src_exts']: - objectFilePath.appendExt('o') - if sourceFilePath.getExt() in 
makeConf['asm_src_exts'] and makeConf['asm_with_object_ext']: - objectFilePath.appendExt('o') - self.setTarget(objectFilePath.shellString()) - - self.sourceFilePath = sourceFilePath - self.includePaths = includePaths - self.objectSubDirPath = objectSubDirPath - - if self.sourceFilePath.getExt() in makeConf['c_src_exts']: - self.includePaths.extend(PathUtil.toPathList(makeConf['c_include_paths'])) - elif self.sourceFilePath.getExt() in makeConf['cxx_src_exts']: - if makeConf['cxx_using_c_include_paths']: - self.includePaths.extend(PathUtil.toPathList(makeConf['c_include_paths'])) - self.includePaths.extend(PathUtil.toPathList(makeConf['cxx_include_paths'])) - elif self.sourceFilePath.getExt() in makeConf['asm_src_exts']: - self.includePaths.extend(PathUtil.toPathList(makeConf['asm_include_paths'])) - - self.targetLine = self.generateTargetLine(makeConf) - - makeDirectoryAction = MakeDirectoryAction(objectSubDirPath) - compileAction = GCCCompileAction(objectFilePath, sourceFilePath, includePaths, makeConf) - self.addBuildAction(makeDirectoryAction) - self.addBuildAction(compileAction) - - def generateTargetLine(self, makeConf): - includePathShellStrings = [] - for includePath in self.includePaths: - includePathShellStrings.append('-I' + includePath.shellString() + '/') - - dependChecker = '' - compilerFlags = '' - if self.sourceFilePath.getExt() in makeConf['c_src_exts']: - dependChecker = makeConf['cc'] - compilerFlags = makeConf['cflags'] - elif self.sourceFilePath.getExt() in makeConf['cxx_src_exts']: - dependChecker = makeConf['cxxc'] - compilerFlags = makeConf['cxxflags'] - elif self.sourceFilePath.getExt() in makeConf['asm_src_exts']: - dependChecker = makeConf['as'] - compilerFlags = makeConf['asflags'] - - dependOption = '' - if self.sourceFilePath.getExt() in makeConf['c_src_exts']: - dependOption = '-MM' - elif self.sourceFilePath.getExt() in makeConf['cxx_src_exts']: - dependOption = '-MM' - elif self.sourceFilePath.getExt() in makeConf['asm_src_exts']: - dependOption = '-M' - - gccCheckCommand = ('%(depend_checker)s %(compiler_flags)s %(depend_option)s %(filename)s %(include_main_path)s' % - { - 'depend_checker': dependChecker, - 'compiler_flags': compilerFlags, - 'depend_option': dependOption, - 'filename': self.sourceFilePath, - 'include_main_path': ' '.join(includePathShellStrings) - }) - gccCheckResult = os.popen(gccCheckCommand).readlines() - - targetLineContent = [] - if len(gccCheckResult) > 0: - if self.sourceFilePath.getExt() in makeConf['c_src_exts'] or self.sourceFilePath.getExt() in makeConf['cxx_src_exts']: - gccCheckResult[0] = str(self.objectSubDirPath.join(gccCheckResult[0])) - elif self.sourceFilePath.getExt() in makeConf['asm_src_exts']: - gccCheckResult[0] = str(self.getTarget() + gccCheckResult[0]) - - for line in gccCheckResult: - outputLine = str(line).rstrip() - if len(outputLine) == 0 : - continue - if outputLine[-1] == '/': - outputLine = outputLine[:-1] + '\\' - targetLineContent.append(outputLine) - - return '\n'.join(targetLineContent) - - def getTargetLine(self): - return self.targetLine - -class GCCCompileAction(Action): - def __init__(self, objectFilePath, sourceFilePath, includePaths, makeConf): - Action.__init__(self) - self.objectFilePath = objectFilePath - self.sourceFilePath = sourceFilePath - self.includePaths = includePaths - self.makeConf = makeConf - pass - - def toText(self): - includePathShellStrings = [] - for includePath in self.includePaths: - includePathShellStrings.append('-I' + includePath.shellString() + '/') - - compiler = '' - 
compilerOption = '' - compilerFlags = '' - if self.sourceFilePath.getExt() in self.makeConf['c_src_exts']: - compiler = self.makeConf['cc'] - compilerOption = '-c' - compilerFlags = self.makeConf['cflags'] - elif self.sourceFilePath.getExt() in self.makeConf['cxx_src_exts']: - compiler = self.makeConf['cxxc'] - compilerOption = '-c' - compilerFlags = self.makeConf['cxxflags'] - elif self.sourceFilePath.getExt() in self.makeConf['asm_src_exts']: - compiler = self.makeConf['as'] - compilerOption = '' - compilerFlags = self.makeConf['asflags'] - - dynamic_cflags = '' - if self.makeConf['target_type'] == 'dynamic_library' and self.makeConf['fpic']: - dynamic_cflags = ' -fPIC' - - compileCommand = ('%(compiler)s %(cflags)s %(compiler_option)s -o %(objectfile)s %(include_main_path)s %(srcfile)s' % - { 'include_main_path': ' '.join(includePathShellStrings), - 'compiler': compiler, - 'cflags': compilerFlags + dynamic_cflags, - 'compiler_option': compilerOption, - 'objectfile': self.objectFilePath.shellString(), - 'srcfile': self.sourceFilePath.shellString() }) - - return compileCommand - -class GCCLinkRule(SimpleRule): - def __init__(self, targetFilePath, objectFilePaths, makeConf): - SimpleRule.__init__(self, targetFilePath, objectFilePaths) - linkAction = GCCLinkAction(targetFilePath, objectFilePaths, makeConf) - self.addBuildAction(linkAction) - -class GCCLinkAction(Action): - def __init__(self, targetFilePath, objectFilePaths, makeConf): - Action.__init__(self) - self.targetFilePath = targetFilePath - self.objectFilePaths = objectFilePaths - self.makeConf = makeConf - pass - - def toText(self): - shellStrings = [] - for objectFilePath in self.objectFilePaths: - shellStrings.append(objectFilePath.shellString()) - - libraries = self.makeConf['libraries'] - libraryFlags = [] - for library in libraries: - libraryFlags.append('-l' + library) - - libraryPaths = self.makeConf['ld_library_paths'] - libraryPathFlags = [] - for libraryPath in libraryPaths: - libraryPathFlags.append('-L' + libraryPath) - - dynamic_ldflags = '' - if self.makeConf['target_type'] == 'dynamic_library': - dynamic_ldflags = ' -shared' - - linkCommand = ('%(linker)s %(ldflags)s -o %(target_file)s %(object_files)s %(library_path_flags)s %(library_flags)s' % { - 'final_file': self.targetFilePath.shellString(), - 'object_files': ' '.join(shellStrings), - 'linker': self.makeConf['ld'], - 'ldflags': self.makeConf['ldflags'] + dynamic_ldflags, - 'target_file': self.targetFilePath.shellString(), - 'library_path_flags': ' '.join(libraryPathFlags), - 'library_flags': ' '.join(libraryFlags) - }) - - return linkCommand - -class ArRule(SimpleRule): - def __init__(self, targetFilePath, objectFilePaths, makeConf): - SimpleRule.__init__(self, targetFilePath, objectFilePaths) - arAction = ArAction(targetFilePath, objectFilePaths, makeConf) - self.addBuildAction(arAction) - -class ArAction(Action): - def __init__(self, targetFilePath, objectFilePaths, makeConf): - Action.__init__(self) - self.targetFilePath = targetFilePath - self.objectFilePaths = objectFilePaths - self.makeConf = makeConf - pass - - def toText(self): - shellStrings = [] - for objectFilePath in self.objectFilePaths: - shellStrings.append(objectFilePath.shellString()) - - arCommand = ('%(ar)s cr %(target_file)s %(object_files)s' % { - 'ar': self.makeConf['ar'], - 'target_file': self.targetFilePath.shellString(), - 'object_files': ' '.join(shellStrings) - }) - - return arCommand - -class MakeDirectoryAction(Action): - def __init__(self, directoryPath): - Action.__init__(self) - 
self.directoryPath = directoryPath - pass - - def toText(self): - mkDirCommand = 'mkdir -p %(directory_path)s' % { - 'directory_path': self.directoryPath.shellString() - } - - return mkDirCommand - diff --git a/deps/kake/src/main.py b/deps/kake/src/main.py deleted file mode 100755 index 30f89da..0000000 --- a/deps/kake/src/main.py +++ /dev/null @@ -1,101 +0,0 @@ -import os -import getopt -import sys -import yaml - -from modules.module import isStandardPhase, isPhaseWithConfig, isPhaseWithoutConfig, ModuleManager -from util import Configuration, ConsoleLogger - -DEFAULT_FILENAME = 'Kakefile' -WORK_PATH = '.' -logger = ConsoleLogger('main') - -def usage(): - print( - """Kake: An advanced make tool for KLUM - -h --help Show help - -v --verbose Verbose mode - -f --filename= Specify another project filename or will use default project filename - """) - sys.exit() - -def reportWrongArguments(): - logger.fatal('Error occurs when process arguments') - sys.exit() - -def reportNoProjectFile(): - logger.fatal('Project file not exists.') - sys.exit() - -def reportWrongPhase(): - logger.fatal('Phase is not standard phase.') - sys.exit() - -def existProjectFile(projectFilename): - for filename in os.listdir(WORK_PATH): - if os.path.isfile(filename) and projectFilename == filename: - return True - - return False - -def processArguments(globalConfig): - try: - options,args = getopt.getopt(sys.argv[1:], - "hvf:", ["help", "verbose", "filename="]) - except getopt.GetoptError: - reportWrongArguments() - - remainingArguments = args - for name, value in options: - if name in ("-h", "--help"): - usage() - if name in ("-v", "--verbose"): - globalConfig.setItem('mode.verbose', True) - if name in ("-f", "--filename"): - globalConfig.setItem('project.filename', value) - - return remainingArguments - -def kake(globalConfig, remainingArguments): - makePhase = globalConfig.getItem('make.phase') - if not isStandardPhase(makePhase): - reportWrongPhase() - - moduleManager = ModuleManager() - - if isPhaseWithConfig(makePhase): - projectFilename = globalConfig.getItem('project.filename') - if existProjectFile(projectFilename) == False: - reportNoProjectFile() - - projectFile = open(projectFilename) - projectFileContent = projectFile.read() - projectFile.close() - projectConfig = yaml.load(projectFileContent) - projectConfig['global'] = globalConfig.toDictionary() - projectConfig['global']['args'] = remainingArguments - - module = moduleManager.getModule(projectConfig['project']['type']) - moduleAction = moduleManager.getModuleAction(module, makePhase) - moduleAction(Configuration.fromDictionary(projectConfig)) - elif isPhaseWithoutConfig(makePhase): - module = moduleManager.getModule(remainingArguments[0]) - moduleAction = moduleManager.getModuleAction(module, makePhase) - moduleAction() - -def main(): - globalConfig = Configuration() - globalConfig.setItem('mode.verbose', False) - globalConfig.setItem('project.filename', DEFAULT_FILENAME) - globalConfig.setItem('make.phase', 'build') - - remainingArguments = processArguments(globalConfig) - - if len(remainingArguments) > 0: - globalConfig.setItem('make.phase', remainingArguments[0]) - remainingArguments = remainingArguments[1:] - - kake(globalConfig, remainingArguments) - -if __name__ == "__main__": - main() diff --git a/deps/kake/src/modules/__init__.py b/deps/kake/src/modules/__init__.py deleted file mode 100755 index e69de29..0000000 diff --git a/deps/kake/src/modules/asm/__init__.py b/deps/kake/src/modules/asm/__init__.py deleted file mode 100755 index 
e69de29..0000000 diff --git a/deps/kake/src/modules/cpp/__init__.py b/deps/kake/src/modules/cpp/__init__.py deleted file mode 100755 index e69de29..0000000 diff --git a/deps/kake/src/modules/cpp/module.py b/deps/kake/src/modules/cpp/module.py deleted file mode 100755 index a4ede63..0000000 --- a/deps/kake/src/modules/cpp/module.py +++ /dev/null @@ -1,242 +0,0 @@ -import os -import sys -import modules.module - -from util import Path, PathUtil, FileUtil, Configuration, ConsoleLogger -from backends.gmake.engine import Document as GMakeDocument -from backends.gmake.engine import SimpleRule as GMakeSimpleRule -from backends.gmake.engine import GCCCompileRule -from backends.gmake.engine import GCCLinkRule -from backends.gmake.engine import ArRule - -SRC_PATH = Path('src', True) -INCLUDE_PATH = Path('include', True) -TARGET_PATH = Path('target', True) -SRC_MAIN_PATH = SRC_PATH.join('main') -SRC_TEST_PATH = SRC_PATH.join('test') -INCLUDE_MAIN_PATH = INCLUDE_PATH.join('main') -INCLUDE_TEST_PATH = INCLUDE_PATH.join('test') -TARGET_OBJECT_PATH = TARGET_PATH.join('object') -TARGET_OBJECT_MAIN_PATH = TARGET_OBJECT_PATH.join('main') -TARGET_SUBMAKE_PATH = TARGET_PATH.join('submake') -TARGET_SUBMAKE_MAIN_PATH = TARGET_SUBMAKE_PATH.join('main') -TARGET_MAKEFILE_PATH = TARGET_PATH.join('Makefile') -FINAL_TARGET_SUBMAKE_PATH = TARGET_SUBMAKE_PATH.join('final_target.mk') - -class Module(modules.module.Module): - mainSourceFiles = [] - objectFiles = [] - defaultConf = {} - - def __init__(self): - modules.module.Module.__init__(self) - self.defaultConf['target_type'] = 'executable' - self.defaultConf['cc'] = 'gcc' - self.defaultConf['cxxc'] = 'g++' - self.defaultConf['as'] = 'nasm' - self.defaultConf['cflags'] = '' - self.defaultConf['cxxflags'] = '' - self.defaultConf['asflags'] = '' - self.defaultConf['fpic'] = True - self.defaultConf['autolink'] = True - self.defaultConf['ar'] = 'ar' - self.defaultConf['ld'] = 'g++' - self.defaultConf['ldflags'] = '' - self.defaultConf['ld_library_paths'] = [] - self.defaultConf['c_src_exts'] = ['.c'] - self.defaultConf['cxx_src_exts'] = ['.cpp'] - self.defaultConf['asm_src_exts'] = ['.asm'] - self.defaultConf['c_include_paths'] = [] - self.defaultConf['cxx_include_paths'] = [] - self.defaultConf['cxx_using_c_include_paths'] = True - self.defaultConf['asm_include_paths'] = [] - self.defaultConf['asm_with_object_ext'] = True - self.logger = ConsoleLogger('cmodule') - - def initKakefile(self): - kakefile = FileUtil.openFile('Kakefile') - kakefileContent = """project: - name: ${name} - version: ${version} - type: cpp""" - kakefile.write(kakefileContent) - kakefile.close() - - return True - - def init(self): - FileUtil.createDirectory(SRC_PATH) - FileUtil.createDirectory(INCLUDE_PATH) - FileUtil.createDirectory(TARGET_PATH) - FileUtil.createDirectory(SRC_MAIN_PATH) - FileUtil.createDirectory(SRC_TEST_PATH) - FileUtil.createDirectory(INCLUDE_MAIN_PATH) - FileUtil.createDirectory(INCLUDE_TEST_PATH) - - return True - - def make(self, projectConfig): - makeConf = { - 'target_type': projectConfig.getItem('make.configuration.target.type', self.defaultConf['target_type']), - 'cc': projectConfig.getItem('make.configuration.compiler.c.cc', self.defaultConf['cc']), - 'cxxc': projectConfig.getItem('make.configuration.compiler.cpp.cc', self.defaultConf['cxxc']), - 'as': projectConfig.getItem('make.configuration.compiler.asm.as', self.defaultConf['as']), - 'cflags': projectConfig.getItem('make.configuration.compiler.c.flags', self.defaultConf['cflags']), - 'cxxflags': 
projectConfig.getItem('make.configuration.compiler.cpp.flags', self.defaultConf['cxxflags']), - 'asflags': projectConfig.getItem('make.configuration.compiler.asm.flags', self.defaultConf['asflags']), - 'fpic': projectConfig.getItem('make.configuration.compiler.fpic', self.defaultConf['fpic']), - 'autolink': projectConfig.getItem('make.configuration.linker.autolink', self.defaultConf['autolink']), - 'ar': projectConfig.getItem('make.configuration.archiver.ar', self.defaultConf['ar']), - 'ld': projectConfig.getItem('make.configuration.linker.ld', self.defaultConf['ld']), - 'ldflags': projectConfig.getItem('make.configuration.linker.flags', self.defaultConf['ldflags']), - 'ld_library_paths': projectConfig.getItem('make.configuration.linker.library_paths', self.defaultConf['ld_library_paths']), - 'libraries': projectConfig.getItem('make.configuration.linker.libraries', []), - 'c_src_exts': projectConfig.getItem('make.configuration.compiler.c.src_exts', self.defaultConf['c_src_exts']), - 'cxx_src_exts': projectConfig.getItem('make.configuration.compiler.cpp.src_exts', self.defaultConf['cxx_src_exts']), - 'asm_src_exts': projectConfig.getItem('make.configuration.compiler.asm.src_exts', self.defaultConf['asm_src_exts']), - 'c_include_paths': projectConfig.getItem('make.configuration.compiler.c.include_paths', self.defaultConf['c_include_paths']), - 'cxx_include_paths': projectConfig.getItem('make.configuration.compiler.cpp.include_paths', self.defaultConf['cxx_include_paths']), - 'cxx_using_c_include_paths': projectConfig.getItem('make.configuration.compiler.cpp.inherit_c_include_path', self.defaultConf['cxx_using_c_include_paths']), - 'asm_include_paths': projectConfig.getItem('make.configuration.compiler.asm.include_paths', self.defaultConf['asm_include_paths']), - 'asm_with_object_ext': projectConfig.getItem('make.configuration.compiler.asm.with_object_ext', self.defaultConf['asm_with_object_ext']), - } - - makefile = GMakeDocument(TARGET_MAKEFILE_PATH) - - self.mainSourceFiles.clear() - self.objectFiles.clear() - cSourceFiles = [] - cppSourceFiles = [] - asmSourceFiles = [] - - FileUtil.searchAllFiles(cSourceFiles, SRC_MAIN_PATH, makeConf['c_src_exts']) - FileUtil.searchAllFiles(cppSourceFiles, SRC_MAIN_PATH, makeConf['cxx_src_exts']) - FileUtil.searchAllFiles(asmSourceFiles, SRC_MAIN_PATH, makeConf['asm_src_exts']) - - self.mainSourceFiles.extend(cSourceFiles) - self.mainSourceFiles.extend(cppSourceFiles) - self.mainSourceFiles.extend(asmSourceFiles) - - for fileName in self.mainSourceFiles: - filePath = Path(fileName, True) - relPath = filePath.getRelevantPath(SRC_MAIN_PATH) - dirName = relPath.getDirName() - baseName = relPath.getBaseName() - - subMakeDirPath = TARGET_SUBMAKE_MAIN_PATH.join(dirName) - FileUtil.createDirectory(subMakeDirPath) - - subMakefilePath = subMakeDirPath.join(PathUtil.getPrefix(baseName)) - subMakefilePath.appendExt('mk') - subMakefile = GMakeDocument(subMakefilePath) - - compileRule = GCCCompileRule(filePath, [INCLUDE_MAIN_PATH], SRC_MAIN_PATH, TARGET_OBJECT_MAIN_PATH, makeConf) - subMakefile.addRule(compileRule) - makefile.addSubDocument(subMakefile) - - objectFilePath = Path(compileRule.getTarget()) - self.objectFiles.append(objectFilePath) - - if makeConf['autolink']: - if makeConf['target_type'] == 'executable': - subMakefile = GMakeDocument(FINAL_TARGET_SUBMAKE_PATH) - - finalFileName = '%(name)s-%(version)s' % { - 'name': projectConfig.getItem('project.name', 'noname'), - 'version': projectConfig.getItem('project.version', '1.0.0')} - finalFilePath = 
TARGET_PATH.join(finalFileName) - - linkRule = GCCLinkRule(finalFilePath, self.objectFiles, makeConf) - subMakefile.addRule(linkRule) - - makefile.addSubDocument(subMakefile) - - allRule = GMakeSimpleRule('all', [finalFilePath]) - makefile.addRule(allRule) - elif makeConf['target_type'] == 'dynamic_library': - subMakefile = GMakeDocument(FINAL_TARGET_SUBMAKE_PATH) - - finalFileName = 'lib%(name)s.so.%(version)s' % { - 'name': projectConfig.getItem('project.name', 'noname'), - 'version': projectConfig.getItem('project.version', '1.0.0')} - finalFilePath = TARGET_PATH.join(finalFileName) - - linkRule = GCCLinkRule(finalFilePath, self.objectFiles, makeConf) - subMakefile.addRule(linkRule) - - makefile.addSubDocument(subMakefile) - - allRule = GMakeSimpleRule('all', [finalFilePath]) - makefile.addRule(allRule) - elif makeConf['target_type'] == 'static_library': - subMakefile = GMakeDocument(FINAL_TARGET_SUBMAKE_PATH) - - finalFileName = 'lib%(name)s.a.%(version)s' % { - 'name': projectConfig.getItem('project.name', 'noname'), - 'version': projectConfig.getItem('project.version', '1.0.0')} - finalFilePath = TARGET_PATH.join(finalFileName) - - arRule = ArRule(finalFilePath, self.objectFiles, makeConf) - subMakefile.addRule(arRule) - - makefile.addSubDocument(subMakefile) - - allRule = GMakeSimpleRule('all', [finalFilePath]) - makefile.addRule(allRule) - else: - self.logger.warn('target_type is not recognized!') - sys.exit(1) - else: - allRule = GMakeSimpleRule('all', self.objectFiles) - makefile.addRule(allRule) - - makefile.writeToFile() - - return True - - def build(self, projectConfig): - returnValue = os.system('make -f %(makefile)s' % { - 'makefile': TARGET_PATH.join('Makefile').shellString() }) - - if returnValue == 0: - return True - else: - return False - - def test(self): - pass - - def package(self): - pass - - def deploy(self): - pass - - def run(self, projectConfig): - finalFileName = '%(name)s-%(version)s' % { - 'name': projectConfig.getItem('project.name', 'noname'), - 'version': projectConfig.getItem('project.version', '1.0.0') } - finalFilePath = TARGET_PATH.join(finalFileName) - arguments = projectConfig.getItem("global.args", []) - shellArguments = [] - for argument in arguments: - shellArgument = '"' + argument + '"' - shellArguments.append(shellArgument) - - runCommand = 'cd target && %(file_path)s %(arguments)s' % { - 'file_path': finalFilePath.shellString(), - 'arguments': ' '.join(shellArguments) - } - - return os.system(runCommand) - - def clean(self, projectConfig): - for cleanPath in (TARGET_OBJECT_PATH, - TARGET_OBJECT_MAIN_PATH, - TARGET_SUBMAKE_PATH, - TARGET_SUBMAKE_MAIN_PATH, - TARGET_MAKEFILE_PATH): - self.logger.debug('Remove: ' + str(cleanPath)) - os.system('rm -rf ' + cleanPath.shellString()) - - return True diff --git a/deps/kake/src/modules/gmake/__init__.py b/deps/kake/src/modules/gmake/__init__.py deleted file mode 100755 index e69de29..0000000 diff --git a/deps/kake/src/modules/gmake/module.py b/deps/kake/src/modules/gmake/module.py deleted file mode 100755 index 0fe8f12..0000000 --- a/deps/kake/src/modules/gmake/module.py +++ /dev/null @@ -1,226 +0,0 @@ -import os -import sys -import modules.module - -from util import Path, PathUtil, FileUtil, Configuration, ConsoleLogger -from backends.gmake.engine import Document as GMakeDocument -from backends.gmake.engine import SimpleRule as GMakeSimpleRule -from backends.gmake.engine import GCCCompileRule -from backends.gmake.engine import GCCLinkRule -from backends.gmake.engine import ArRule - -SRC_PATH = 
Path('src', True) -INCLUDE_PATH = Path('include', True) -TARGET_PATH = Path('target', True) -SRC_MAIN_PATH = SRC_PATH.join('main') -SRC_TEST_PATH = SRC_PATH.join('test') -INCLUDE_MAIN_PATH = INCLUDE_PATH.join('main') -INCLUDE_TEST_PATH = INCLUDE_PATH.join('test') -TARGET_OBJECT_PATH = TARGET_PATH.join('object') -TARGET_OBJECT_MAIN_PATH = TARGET_OBJECT_PATH.join('main') -TARGET_SUBMAKE_PATH = TARGET_PATH.join('submake') -TARGET_SUBMAKE_MAIN_PATH = TARGET_SUBMAKE_PATH.join('main') -TARGET_MAKEFILE_PATH = TARGET_PATH.join('Makefile') -FINAL_TARGET_SUBMAKE_PATH = TARGET_SUBMAKE_PATH.join('final_target.mk') - -class Module(modules.module.Module): - mainSourceFiles = [] - objectFiles = [] - defaultConf = {} - - def __init__(self): - self.defaultConf['target_type'] = 'executable' - self.defaultConf['cc'] = 'gcc' - self.defaultConf['cxxc'] = 'g++' - self.defaultConf['cflags'] = '' - self.defaultConf['cxxflags'] = '' - self.defaultConf['fpic'] = True - self.defaultConf['autolink'] = True - self.defaultConf['ar'] = 'ar' - self.defaultConf['ld'] = 'g++' - self.defaultConf['ldflags'] = '' - self.defaultConf['ld_library_paths'] = [] - self.defaultConf['c_src_exts'] = ['.c'] - self.defaultConf['cxx_src_exts'] = ['.cpp'] - self.defaultConf['c_include_paths'] = [] - self.defaultConf['cxx_include_paths'] = [] - self.defaultConf['cxx_using_c_include_paths'] = True - self.logger = ConsoleLogger('cmodule') - - def initKakefile(self): - kakefile = FileUtil.openFile('Kakefile') - kakefileContent = """project: - name: ${name} - version: ${version} - type: cpp""" - kakefile.write(kakefileContent) - kakefile.close() - - return True - - def init(self): - FileUtil.createDirectory(SRC_PATH) - FileUtil.createDirectory(INCLUDE_PATH) - FileUtil.createDirectory(TARGET_PATH) - FileUtil.createDirectory(SRC_MAIN_PATH) - FileUtil.createDirectory(SRC_TEST_PATH) - FileUtil.createDirectory(INCLUDE_MAIN_PATH) - FileUtil.createDirectory(INCLUDE_TEST_PATH) - - return True - - def make(self, projectConfig): - makeConf = { - 'target_type': projectConfig.getItem('make.configuration.target.type', self.defaultConf['target_type']), - 'cc': projectConfig.getItem('make.configuration.compiler.c.cc', self.defaultConf['cc']), - 'cxxc': projectConfig.getItem('make.configuration.compiler.cpp.cc', self.defaultConf['cxxc']), - 'cflags': projectConfig.getItem('make.configuration.compiler.c.flags', self.defaultConf['cflags']), - 'cxxflags': projectConfig.getItem('make.configuration.compiler.cpp.flags', self.defaultConf['cxxflags']), - 'fpic': projectConfig.getItem('make.configuration.compiler.fpic', self.defaultConf['fpic']), - 'autolink': projectConfig.getItem('make.configuration.linker.autolink', self.defaultConf['autolink']), - 'ar': projectConfig.getItem('make.configuration.archiver.ar', self.defaultConf['ar']), - 'ld': projectConfig.getItem('make.configuration.linker.ld', self.defaultConf['ld']), - 'ldflags': projectConfig.getItem('make.configuration.linker.flags', self.defaultConf['ldflags']), - 'ld_library_paths': projectConfig.getItem('make.configuration.linker.library_paths', self.defaultConf['ld_library_paths']), - 'libraries': projectConfig.getItem('make.configuration.linker.libraries', []), - 'c_src_exts': projectConfig.getItem('make.configuration.compiler.c.src_exts', self.defaultConf['c_src_exts']), - 'cxx_src_exts': projectConfig.getItem('make.configuration.compiler.cpp.src_exts', self.defaultConf['cxx_src_exts']), - 'c_include_paths': projectConfig.getItem('make.configuration.compiler.c.include_paths', 
self.defaultConf['c_include_paths']), - 'cxx_include_paths': projectConfig.getItem('make.configuration.compiler.cpp.include_paths', self.defaultConf['cxx_include_paths']), - 'cxx_using_c_include_paths': projectConfig.getItem('make.configuration.compiler.cpp.inherit_c_include_path', self.defaultConf['cxx_using_c_include_paths']) - } - - makefile = GMakeDocument(TARGET_MAKEFILE_PATH) - - self.mainSourceFiles.clear() - self.objectFiles.clear() - cSourceFiles = [] - cppSourceFiles = [] - FileUtil.searchAllFiles(cSourceFiles, SRC_MAIN_PATH, makeConf['c_src_exts']) - FileUtil.searchAllFiles(cppSourceFiles, SRC_MAIN_PATH, makeConf['cxx_src_exts']) - self.mainSourceFiles.extend(cSourceFiles) - self.mainSourceFiles.extend(cppSourceFiles) - - for fileName in self.mainSourceFiles: - filePath = Path(fileName, True) - relPath = filePath.getRelevantPath(SRC_MAIN_PATH) - dirName = relPath.getDirName() - baseName = relPath.getBaseName() - - subMakeDirPath = TARGET_SUBMAKE_MAIN_PATH.join(dirName) - FileUtil.createDirectory(subMakeDirPath) - - subMakefilePath = subMakeDirPath.join(PathUtil.getPrefix(baseName)) - subMakefilePath.appendExt('mk') - subMakefile = GMakeDocument(subMakefilePath) - - compileRule = GCCCompileRule(filePath, [INCLUDE_MAIN_PATH], SRC_MAIN_PATH, TARGET_OBJECT_MAIN_PATH, makeConf) - subMakefile.addRule(compileRule) - makefile.addSubDocument(subMakefile) - - objectFilePath = Path(compileRule.getTarget()) - self.objectFiles.append(objectFilePath) - - if makeConf['autolink']: - if makeConf['target_type'] == 'executable': - subMakefile = GMakeDocument(FINAL_TARGET_SUBMAKE_PATH) - - finalFileName = '%(name)s-%(version)s' % { - 'name': projectConfig.getItem('project.name', 'noname'), - 'version': projectConfig.getItem('project.version', '1.0.0')} - finalFilePath = TARGET_PATH.join(finalFileName) - - linkRule = GCCLinkRule(finalFilePath, self.objectFiles, makeConf) - subMakefile.addRule(linkRule) - - makefile.addSubDocument(subMakefile) - - allRule = GMakeSimpleRule('all', [finalFilePath]) - makefile.addRule(allRule) - elif makeConf['target_type'] == 'dynamic_library': - subMakefile = GMakeDocument(FINAL_TARGET_SUBMAKE_PATH) - - finalFileName = 'lib%(name)s.so.%(version)s' % { - 'name': projectConfig.getItem('project.name', 'noname'), - 'version': projectConfig.getItem('project.version', '1.0.0')} - finalFilePath = TARGET_PATH.join(finalFileName) - - linkRule = GCCLinkRule(finalFilePath, self.objectFiles, makeConf) - subMakefile.addRule(linkRule) - - makefile.addSubDocument(subMakefile) - - allRule = GMakeSimpleRule('all', [finalFilePath]) - makefile.addRule(allRule) - elif makeConf['target_type'] == 'static_library': - subMakefile = GMakeDocument(FINAL_TARGET_SUBMAKE_PATH) - - finalFileName = 'lib%(name)s.a.%(version)s' % { - 'name': projectConfig.getItem('project.name', 'noname'), - 'version': projectConfig.getItem('project.version', '1.0.0')} - finalFilePath = TARGET_PATH.join(finalFileName) - - arRule = ArRule(finalFilePath, self.objectFiles, makeConf) - subMakefile.addRule(arRule) - - makefile.addSubDocument(subMakefile) - - allRule = GMakeSimpleRule('all', [finalFilePath]) - makefile.addRule(allRule) - else: - self.logger.warn('target_type is not recognized!') - sys.exit(1) - else: - allRule = GMakeSimpleRule('all', self.objectFiles) - makefile.addRule(allRule) - - makefile.writeToFile() - - return True - - def build(self, projectConfig): - returnValue = os.system('make -f %(makefile)s' % { - 'makefile': TARGET_PATH.join('Makefile').shellString() }) - - if returnValue == 0: - return True 
- else: - return False - - def test(self): - pass - - def package(self): - pass - - def deploy(self): - pass - - def run(self, projectConfig): - finalFileName = '%(name)s-%(version)s' % { - 'name': projectConfig.getItem('project.name', 'noname'), - 'version': projectConfig.getItem('project.version', '1.0.0') } - finalFilePath = TARGET_PATH.join(finalFileName) - arguments = projectConfig.getItem("global.args", []) - shellArguments = [] - for argument in arguments: - shellArgument = '"' + argument + '"' - shellArguments.append(shellArgument) - - runCommand = 'cd target && %(file_path)s %(arguments)s' % { - 'file_path': finalFilePath.shellString(), - 'arguments': ' '.join(shellArguments) - } - - return os.system(runCommand) - - def clean(self, projectConfig): - for cleanPath in (TARGET_OBJECT_PATH, - TARGET_OBJECT_MAIN_PATH, - TARGET_SUBMAKE_PATH, - TARGET_SUBMAKE_MAIN_PATH, - TARGET_MAKEFILE_PATH): - self.logger.debug('Remove: ' + str(cleanPath)) - os.system('rm -rf ' + cleanPath.shellString()) - - return True diff --git a/deps/kake/src/modules/ld/__init__.py b/deps/kake/src/modules/ld/__init__.py deleted file mode 100755 index e69de29..0000000 diff --git a/deps/kake/src/modules/ld/module.py b/deps/kake/src/modules/ld/module.py deleted file mode 100755 index 987b33a..0000000 --- a/deps/kake/src/modules/ld/module.py +++ /dev/null @@ -1,180 +0,0 @@ -import os -import modules.module - -from util import Path, PathUtil, FileUtil, Configuration, ConsoleLogger -from backends.gmake.engine import Document as GMakeDocument -from backends.gmake.engine import SimpleRule as GMakeSimpleRule -from backends.gmake.engine import GCCCompileRule -from backends.gmake.engine import GCCLinkRule -from backends.gmake.engine import ArRule - -TARGET_PATH = Path('target', True) -TARGET_MAKEFILE_PATH = TARGET_PATH.join('Makefile') -TARGET_SUBMAKE_PATH = TARGET_PATH.join('submake') -FINAL_TARGET_SUBMAKE_PATH = TARGET_SUBMAKE_PATH.join('final_target') - -class Module(modules.module.Module): - objectFiles = [] - defaultConf = {} - - def __init__(self): - modules.module.Module.__init__(self) - self.defaultConf['target_type'] = 'executable' - self.defaultConf['ar'] = 'ar' - self.defaultConf['ld'] = 'g++' - self.defaultConf['ldflags'] = '' - self.defaultConf['ld_library_paths'] = [] - self.logger = ConsoleLogger('ldmodule') - - def initKakefile(self): - kakefile = FileUtil.openFile('Kakefile') - kakefileContent = """project: - name: ${name} - version: ${version} - type: ld - - make: - configuration: - linker: - include: [ ]""" - kakefile.write(kakefileContent) - kakefile.close() - - return True - - def init(self): - FileUtil.createDirectory(TARGET_PATH) - - return True - - def make(self, projectConfig): - makeConf = { - 'target_type': projectConfig.getItem('make.configuration.target.type', self.defaultConf['target_type']), - 'ar': projectConfig.getItem('make.configuration.archiver.ar', self.defaultConf['ar']), - 'ld': projectConfig.getItem('make.configuration.linker.ld', self.defaultConf['ld']), - 'ldflags': projectConfig.getItem('make.configuration.linker.flags', self.defaultConf['ldflags']), - 'ld_library_paths': projectConfig.getItem('make.configuration.linker.library_paths', self.defaultConf['ld_library_paths']), - 'libraries': projectConfig.getItem('make.configuration.linker.libraries', []), - 'solution_dir': projectConfig.getItem('global.solution.dir', '..'), - 'sources': projectConfig.getItem('make.configuration.linker.include', []), - } - - makefile = GMakeDocument(TARGET_MAKEFILE_PATH) - objectSourcePaths = [] - - 
self.objectFiles.clear() - solutionDirPath = Path(makeConf['solution_dir'], True) - objectFilePaths = [] - for objectSourcePath in makeConf['sources']: - objectSourcePath = solutionDirPath.join(objectSourcePath) - FileUtil.searchAllFiles(objectFilePaths, objectSourcePath, [ '.o' ]) - - for objectFilePath in objectFilePaths: - self.objectFiles.append(Path(objectFilePath)) - - finalFilePath = self.getFinalFilePath(projectConfig, makeConf) - linkRule = self.getLinkRule(finalFilePath, makeConf) - subMakefile = GMakeDocument(FINAL_TARGET_SUBMAKE_PATH) - subMakefile.addRule(linkRule) - - makefile.addSubDocument(subMakefile) - - allRule = GMakeSimpleRule('all', [finalFilePath]) - makefile.addRule(allRule) - - makefile.writeToFile() - - return True - - def build(self, projectConfig): - returnValue = os.system('make -f %(makefile)s' % { - 'makefile': TARGET_PATH.join('Makefile').shellString() }) - - if returnValue == 0: - return True - else: - return False - - def test(self): - pass - - def package(self): - pass - - def deploy(self): - pass - - def run(self, projectConfig): - finalFileName = '%(name)s-%(version)s' % { - 'name': projectConfig.getItem('project.name', 'noname'), - 'version': projectConfig.getItem('project.version', '1.0.0') } - finalFilePath = TARGET_PATH.join(finalFileName) - arguments = projectConfig.getItem("global.args", []) - shellArguments = [] - for argument in arguments: - shellArgument = '"' + argument + '"' - shellArguments.append(shellArgument) - - runCommand = 'cd target && %(file_path)s %(arguments)s' % { - 'file_path': finalFilePath.shellString(), - 'arguments': ' '.join(shellArguments) - } - - return os.system(runCommand) - - def clean(self, projectConfig): - makeConf = self.getMakeConf(projectConfig) - finalFilePath = self.getFinalFilePath(projectConfig, makeConf) - - for cleanPath in (TARGET_SUBMAKE_PATH, - TARGET_MAKEFILE_PATH, - FINAL_TARGET_SUBMAKE_PATH, - finalFilePath): - self.logger.debug('Remove: ' + str(cleanPath)) - os.system('rm -rf ' + cleanPath.shellString()) - - return True - - def getMakeConf(self, projectConfig): - return { - 'target_type': projectConfig.getItem('make.configuration.target.type', self.defaultConf['target_type']), - 'ar': projectConfig.getItem('make.configuration.archiver.ar', self.defaultConf['ar']), - 'ld': projectConfig.getItem('make.configuration.linker.ld', self.defaultConf['ld']), - 'ldflags': projectConfig.getItem('make.configuration.linker.flags', self.defaultConf['ldflags']), - 'ld_library_paths': projectConfig.getItem('make.configuration.linker.library_paths', self.defaultConf['ld_library_paths']), - 'libraries': projectConfig.getItem('make.configuration.linker.libraries', []), - 'solution_dir': projectConfig.getItem('global.solution.dir', '..'), - 'sources': projectConfig.getItem('make.configuration.linker.include', []), - } - - - def getFinalFilePath(self, projectConfig, makeConf): - finalFileName = '' - if makeConf['target_type'] == 'executable': - finalFileName = '%(name)s-%(version)s' % { - 'name': projectConfig.getItem('project.name', 'noname'), - 'version': projectConfig.getItem('project.version', '1.0.0')} - elif makeConf['target_type'] == 'dynamic_library': - finalFileName = 'lib%(name)s.so.%(version)s' % { - 'name': projectConfig.getItem('project.name', 'noname'), - 'version': projectConfig.getItem('project.version', '1.0.0')} - elif makeConf['target_type'] == 'static_library': - finalFileName = 'lib%(name)s.a.%(version)s' % { - 'name': projectConfig.getItem('project.name', 'noname'), - 'version': 
projectConfig.getItem('project.version', '1.0.0')} - - return TARGET_PATH.join(finalFileName) - - def getLinkRule(self, finalFilePath, makeConf): - linkRule = None - if makeConf['target_type'] == 'executable': - linkRule = GCCLinkRule(finalFilePath, self.objectFiles, makeConf) - elif makeConf['target_type'] == 'dynamic_library': - linkRule = GCCLinkRule(finalFilePath, self.objectFiles, makeConf) - elif makeConf['target_type'] == 'static_library': - linkRule = ArRule(finalFilePath, self.objectFiles, makeConf) - else: - self.logger.warn('target_type is not recognized!') - sys.exit(1) - - return linkRule diff --git a/deps/kake/src/modules/module.py b/deps/kake/src/modules/module.py deleted file mode 100755 index 9e9c076..0000000 --- a/deps/kake/src/modules/module.py +++ /dev/null @@ -1,156 +0,0 @@ -import os -import util -from util import Path -from util import PathUtil, FileUtil -from util import ConsoleLogger - -FUNCTIONS_WITH_CONF = set(['make', 'build', 'run', 'clean']) -FUNCTIONS_WITHOUT_CONF = set(['init']) -logger = ConsoleLogger('module') - -STANDARD_PHASE = set(['init', 'make', 'build', 'test', 'package', 'deploy', 'run', 'clean']) - -def MakeBuildVersion(): - versionFile = None - if not PathUtil.exist('target/BUILD_VERSION'): - versionFile = FileUtil.openFile('target/BUILD_VERSION') - else: - versionFile = open('target/BUILD_VERSION', 'w') - versionFile.close() - -def KakefileChanged(): - if PathUtil.exist('target/BUILD_VERSION'): - kakefileModifiedTime = os.path.getmtime('Kakefile') - lastBuildTime = os.path.getmtime('target/BUILD_VERSION') - if kakefileModifiedTime > lastBuildTime: - return True - - return False - -class Module(object): - def initCaller(self): - self.enterPhase('Init') - if not PathUtil.exist('Kakefile'): - self.initKakefile() - else: - logger.debug('Skip initialize Kakefile') - self.init() - self.exitPhase('Init') - - def makeCaller(self, projectConfig): - self.enterPhase('Make') - isSuccessful = self.make(projectConfig) - self.exitPhase('Make') - - return isSuccessful - - def buildCaller(self, projectConfig): - if KakefileChanged(): - self.cleanCaller(projectConfig) - - if not self.makeCaller(projectConfig): - return False - - self.enterPhase('Build') - isSuccessful = self.build(projectConfig) - MakeBuildVersion() - self.exitPhase('Build') - - return isSuccessful - - def testCaller(self): - self.enterPhase('Test') - self.test() - self.exitPhase('Test') - - def packageCaller(self): - self.enterPhase('Package') - self.package() - self.exitPhase('Package') - - def deployCaller(self): - self.enterPhase('Deployment') - self.deploy() - self.exitPhase('Deployment') - - def runCaller(self, projectConfig): - if not self.buildCaller(projectConfig): - return False - - self.enterPhase('Run') - returnValue = self.run(projectConfig) - self.exitPhase('Run') - - return returnValue - - def cleanCaller(self, projectConfig): - self.enterPhase('Clean') - self.clean(projectConfig) - self.exitPhase('Clean') - - def enterPhase(self, phaseName): - logger.info('Enter phase: ' + phaseName) - - def exitPhase(self, phaseName): - logger.info('Exit phase: ' + phaseName) - - def initKakefile(self): - raise NotImplementedError() - def init(self): - raise NotImplementedError() - def make(self, projectConfig): - raise NotImplementedError() - def build(self, projectConfig): - raise NotImplementedError() - def test(self): - raise NotImplementedError() - def package(self): - raise NotImplementedError() - def deploy(self): - raise NotImplementedError() - def run(self, projectConfig): - raise 
NotImplementedError() - def clean(self, projectConfig): - raise NotImplementedError() - -def isStandardPhase(phase): - return phase in STANDARD_PHASE - -def isPhaseWithConfig(phase): - return phase in FUNCTIONS_WITH_CONF - -def isPhaseWithoutConfig(phase): - return phase in FUNCTIONS_WITHOUT_CONF - -class ModuleManager(object): - moduleList = [] - - def __init__(self): - self.scanModules() - - def scanModules(self): - modules = __import__('modules') - modulesPath = modules.__path__[0] - modulesSubFiles = os.listdir(modulesPath) - for modulesSubFile in modulesSubFiles: - if os.path.isdir(PathUtil.join(modulesPath, modulesSubFile)) and not modulesSubFile.startswith('__'): - self.moduleList.append(modulesSubFile) - - def getModule(self, projectType): - if not projectType in self.moduleList: - self.reportProjectTypeNotSupported() - - moduleName = 'modules.' + projectType + '.module' - moduleFile = __import__(moduleName, {}, {}, ['a']) - module = moduleFile.Module() - - return module - - def reportProjectTypeNotSupported(self): - logger.fatal('The project type is not supported') - sys.exit() - - def getModuleAction(self, module, phase): - if hasattr(module, phase): - return getattr(module, phase + 'Caller') - return None diff --git a/deps/kake/src/modules/solution/__init__.py b/deps/kake/src/modules/solution/__init__.py deleted file mode 100755 index e69de29..0000000 diff --git a/deps/kake/src/modules/solution/module.py b/deps/kake/src/modules/solution/module.py deleted file mode 100755 index d6dccfa..0000000 --- a/deps/kake/src/modules/solution/module.py +++ /dev/null @@ -1,108 +0,0 @@ -import os -import sys -import modules.module - -from util import Path, PathUtil, FileUtil, Configuration, ConsoleLogger -from backends.gmake.engine import Document as GMakeDocument -from backends.gmake.engine import SimpleRule as GMakeSimpleRule -from backends.gmake.engine import GCCCompileRule -from backends.gmake.engine import GCCLinkRule -from backends.gmake.engine import ArRule - -PROJECT_PATH = Path('project', True) -TARGET_PATH = Path('target', True) - -class Module(modules.module.Module): - defaultConf = {} - projects = [] - - def __init__(self): - #'solution_dir': projectConfig.getItem('global.solution.dir', '..'), - self.logger = ConsoleLogger('cmodule') - - projectNames = os.listdir('project') - projectNames.sort() - for projectName in projectNames: - project = { - 'name': projectName, - 'path': PROJECT_PATH.join(projectName) - } - self.projects.append(project) - - - def initKakefile(self): - kakefile = FileUtil.openFile('Kakefile') - kakefileContent = """project: - name: ${name} - version: ${version} - type: solution - -run: - default: ${default}""" - kakefile.write(kakefileContent) - kakefile.close() - - return True - - def init(self): - FileUtil.createDirectory(PROJECT_PATH) - FileUtil.createDirectory(TARGET_PATH) - - return True - - def make(self, projectConfig): - return True - - def build(self, projectConfig): - for project in self.projects: - projectFilename = globalConfig.getItem('project.filename') - if existProjectFile(projectFilename) == False: - reportNoProjectFile() - - projectFile = open(projectFilename) - projectFileContent = projectFile.read() - projectFile.close() - projectConfig = yaml.load(projectFileContent) - projectConfig['global'] = globalConfig.toDictionary() - projectConfig['global']['args'] = remainingArguments - - module = moduleManager.getModule(projectConfig['project']['type']) - moduleAction = moduleManager.getModuleAction(module, makePhase) - 
moduleAction(Configuration.fromDictionary(projectConfig)) - - - returnValue = 0 - if returnValue == 0: - return True - else: - return False - - def test(self): - pass - - def package(self): - pass - - def deploy(self): - pass - - def run(self, projectConfig): - finalFileName = '%(name)s-%(version)s' % { - 'name': projectConfig.getItem('project.name', 'noname'), - 'version': projectConfig.getItem('project.version', '1.0.0') } - finalFilePath = TARGET_PATH.join(finalFileName) - arguments = projectConfig.getItem("global.args", []) - shellArguments = [] - for argument in arguments: - shellArgument = '"' + argument + '"' - shellArguments.append(shellArgument) - - runCommand = 'cd target && %(file_path)s %(arguments)s' % { - 'file_path': finalFilePath.shellString(), - 'arguments': ' '.join(shellArguments) - } - - return os.system(runCommand) - - def clean(self, projectConfig): - return True diff --git a/deps/kake/src/runtest.py b/deps/kake/src/runtest.py deleted file mode 100755 index 86b4a68..0000000 --- a/deps/kake/src/runtest.py +++ /dev/null @@ -1,83 +0,0 @@ -#! /usr/bin/python3 - -from unittest import TestLoader, TextTestRunner, TestSuite -import os.path -import test.util - -class AutomaticTestRunner: - def __init__(self): - self.loader = TestLoader() - self.runner = TextTestRunner() - - def runTests(self): - testCaseNames = self.findTestCaseNames() - testCaseModules = self.loadTestCaseModules(testCaseNames) - testSuite = self.loadTestsFromModules(testCaseModules) - return self.runner.run(testSuite) - - def findTestCaseNames(self): - rootSuite = __import__('test') - rootSuitePath = rootSuite.__path__[0] - return self.findTestCaseNamesFromDirectory(rootSuitePath) - - def loadTestCaseModules(self, testCaseNames): - testCaseModules = [] - for testCaseName in testCaseNames: - testCaseModule = __import__(testCaseName, {}, {}, ['a']) - testCaseModules.append(testCaseModule) - - return testCaseModules - - def loadTestsFromModules(self, testCaseModules): - suite = TestSuite() - for testCaseModule in testCaseModules: - subSuite = self.loader.loadTestsFromModule(testCaseModule) - suite.addTest(subSuite) - - return suite - - def findTestCaseNamesFromDirectory(self, rootPath): - subFiles = os.listdir(rootPath) - - testModuleNames = self.findSubModuleNames(rootPath, subFiles) - subSuitePaths = self.findSubSuitePaths(rootPath, subFiles) - subSuiteTestModuleNames = self.findTestCaseNamesFromSubSuitePaths(subSuitePaths) - testModuleNames.extend(subSuiteTestModuleNames) - - return testModuleNames - - def findSubModuleNames(self, rootPath, subFiles): - testModuleNames = [] - for subFile in subFiles: - subFilePath = rootPath + '/' + subFile - if os.path.isfile(subFilePath) and subFile.endswith('.py') and not subFile.startswith('__'): - rootModuleName = os.path.relpath(rootPath).replace('/', '.') - testModuleName = rootModuleName + '.' 
+ subFile[0:-3] - testModuleNames.append(testModuleName) - - return testModuleNames - - def findSubSuitePaths(self, rootPath, subFiles): - subSuitePaths = [] - for subFile in subFiles: - subFilePath = rootPath + '/' + subFile - if os.path.isdir(subFilePath) and not subFile.startswith('__'): - subSuitePaths.append(subFilePath) - - return subSuitePaths - - def findTestCaseNamesFromSubSuitePaths(self, subSuitePaths): - testModuleNames = [] - - for subSuitePath in subSuitePaths: - subTestModuleNames = self.findTestCaseNamesFromDirectory(subSuitePath) - testModuleNames.extend(subTestModuleNames) - - return testModuleNames - -def main(): - testRunner = AutomaticTestRunner() - testResult = testRunner.runTests() - -if __name__ == '__main__': - main() diff --git a/deps/kake/src/util.py b/deps/kake/src/util.py deleted file mode 100755 index 02eb361..0000000 --- a/deps/kake/src/util.py +++ /dev/null @@ -1,196 +0,0 @@ -import os -import sys -import logging - -class ConsoleLogger(object): - def __init__(self, name): - handler = logging.StreamHandler(sys.stdout) - fmt = '[%(levelname)s] %(message)s' - formatter = logging.Formatter(fmt) - handler.setFormatter(formatter) - - self.logger = logging.getLogger(name) - self.logger.addHandler(handler) - self.logger.setLevel(logging.DEBUG) - - def critical(self, msg, *args, **kwargs): - self.logger.critical(msg, *args, **kwargs) - - def debug(self, msg, *args, **kwargs): - self.logger.debug(msg, *args, **kwargs) - - def error(self, msg, *args, **kwargs): - self.logger.error(msg, *args, **kwargs) - - def exception(self, msg, *args, **kwargs): - self.logger.exception(msg, *args, **kwargs) - - def fatal(self, msg, *args, **kwargs): - self.logger.fatal(msg, *args, **kwargs) - - def info(self, msg, *args, **kwargs): - self.logger.info(msg, *args, **kwargs) - - def warn(self, msg, *args, **kwargs): - self.logger.warn(msg, *args, **kwargs) - -logger = ConsoleLogger('util') - -class PathUtil(object): - @classmethod - def join(cls, path1, path2): - return Path(path1).join(path2).string() - - @classmethod - def getPrefix(cls, path): - return Path(path).getPrefix() - - @classmethod - def exist(cls, filePath): - filename = str(filePath) - return os.path.exists(str(filename)) - - @classmethod - def toPathList(cls, stringList, isAbsolutePath = False): - pathList = [] - for string in stringList: - pathList.append(Path(string, isAbsolutePath)) - - return pathList - -class FileUtil(object): - @classmethod - def openFile(cls, filePath): - filePath = Path(filePath) - dirName = filePath.getDirName() - if dirName and not os.path.exists(dirName): - os.makedirs(dirName) - return open(filePath.string(), 'w') - - @classmethod - def createDirectory(cls, dirPath): - dirName = str(dirPath) - if os.path.exists(dirName): - logger.debug('Skip creating exists directory: %(dirName)s' % {'dirName': dirName}) - else: - logger.debug('Create directory: %(dirName)s' % {'dirName': dirName}) - os.makedirs(dirName) - - @classmethod - def searchAllFiles(cls, fileList, root, exts): - for dirPath, dirNames, fileNames in os.walk(str(root), True): - for fileName in fileNames: - filePath = Path(fileName) - if filePath.getExt() in exts: - fileList.append(PathUtil.join(dirPath, fileName)) - -class Path(object): - path = "" - absolute = False - - def __init__(self): - self.path = "" - self.absolute = False - pass - - def __init__(self, path, isAbsolutePath = False): - if isAbsolutePath: - self.setAbsolutePath(path) - else: - self.setRelevantPath(path) - - def __str__(self): - return self.string() - - def 
string(self): - return self.path - - def shellString(self): - return self.path.replace(' ', '\\ ') - - def clone(self): - return Path(self.getPath(), self.absolute) - - def setAbsolutePath(self, path): - self.path = os.path.abspath(str(path)) - self.absolute = True - - def setRelevantPath(self, path): - self.path = str(path) - self.absolute = False - - def getPath(self): - return self.path - - def getRelevantPath(self, rootPath): - return Path(os.path.relpath(self.path, str(rootPath)), False) - - def getDirName(self): - return os.path.dirname(self.path) - - def getBaseName(self): - return os.path.basename(self.path) - - def join(self, subPath): - return self.clone().joinInPlace(subPath) - - def joinInPlace(self, subPath): - if ( self.path.endswith('/') ): - self.path = self.path + str(subPath) - else: - self.path = self.path + "/" + str(subPath) - - return self - - def getPrefix(self): - return os.path.extsep.join(os.path.splitext(self.path)[:-1]) - - def getExt(self): - return os.path.splitext(self.path)[-1] - - def appendExt(self, ext): - self.path = self.path + os.path.extsep + ext - - def removeExt(self, ext): - self.path = self.getPrefix() - - def replaceExt(self, ext): - self.path = self.getPrefix() + os.path.extsep + ext - -class Configuration(object): - def __init__(self): - self.configuration = {} - - @classmethod - def fromDictionary(cls, dictionary): - configuration = Configuration() - configuration.configuration = dictionary - return configuration - - def toDictionary(self): - return self.configuration - - def getItem(self, path, defaultValue = None): - keys = path.split('.') - currentValue = self.configuration - try: - for key in keys: - currentValue = currentValue[key] - except KeyError: - currentValue = defaultValue - - return currentValue - - def setItem(self, path, value): - keys = path.split('.') - currentDictionary = self.configuration - - keyIndex = 0 - for key in keys: - keyIndex = keyIndex + 1 - if not keyIndex == len(keys): - if key not in currentDictionary: - currentDictionary[key] = {} - currentDictionary = currentDictionary[key] - else: - currentDictionary[key] = value diff --git a/deps/kake/templates/project/CppProject.kake b/deps/kake/templates/project/CppProject.kake new file mode 100644 index 0000000..c6a2b1b --- /dev/null +++ b/deps/kake/templates/project/CppProject.kake @@ -0,0 +1,20 @@ +'use strict'; + +module.exports = { + name: 'CppProject', + version: '0.0.1', + type: 'cpp', + target: 'executable', + targetPath: '', + compiler: { + cxx: 'g++', + cxxflags: ['-std=c++11'], + src: [ + ], + includePaths: [ + ] + }, + linker: { + ld: 'g++', + } +}; diff --git a/deps/kake/templates/solution/SolutionSample.kake b/deps/kake/templates/solution/SolutionSample.kake new file mode 100644 index 0000000..4813b0c --- /dev/null +++ b/deps/kake/templates/solution/SolutionSample.kake @@ -0,0 +1,9 @@ +'use strict'; + +module.exports = { + name: 'SampleSolution', + version: '0.0.1', + type: 'solution', + projects: [ + ] +}; diff --git a/deps/kake/templates/solution/deps.kake b/deps/kake/templates/solution/deps.kake new file mode 100644 index 0000000..8eb077a --- /dev/null +++ b/deps/kake/templates/solution/deps.kake @@ -0,0 +1,4 @@ +'use strict'; + +module.exports = { +}; diff --git a/deps/meshy/CMakeLists.txt b/deps/meshy/CMakeLists.txt deleted file mode 100755 index de597c7..0000000 --- a/deps/meshy/CMakeLists.txt +++ /dev/null @@ -1,76 +0,0 @@ -cmake_minimum_required(VERSION 3.3) -project(net_framework) - -set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") - 
-set(SOURCE_FILES - include/bsd/net_bsd.h - include/epoll/EPollClient.h - include/epoll/EPollConnection.h - include/epoll/epollloop.h - include/epoll/EPollServer.h - include/epoll/EPollStream.h - include/iocp/IOCPClient.h - include/iocp/IOCPConnection.h - include/iocp/IOCPLoop.h - include/iocp/IOCPServer.h - include/iocp/IOCPStream.h - include/kqueue/kqueue.h - include/kqueue/kqueueloop.h - include/linux/common.h - include/linux/net_linux.h - include/template/utils/thread_pool.tcc - include/utils/common_utils.h - include/utils/concurrent_queue.h - include/utils/logger.h - include/utils/thread_pool.h - include/utils/time.h - include/utils/String.h - include/win32/net_win32.h - include/bytearray.h - include/DataSink.h - include/eventqueue.h - include/eventqueueloop.h - include/IoLoop.h - include/loop.h - include/net.h - include/PackageDataSink.h - include/Meshy.h - src/epoll/EPollClient.cpp - src/epoll/EPollConnection.cpp - src/epoll/epollloop.cpp - src/epoll/EPollServer.cpp - src/epoll/EPollStream.cpp - src/iocp/IOCPClient.cpp - src/iocp/IOCPConnection.cpp - src/iocp/iocploop.cpp - src/iocp/IOCPServer.cpp - src/iocp/IOCPStream.cpp - src/kqueue/kqueue.cpp - src/kqueue/kqueueloop.cpp - src/utils/common_utils.cpp - src/utils/logger.cpp - src/utils/thread_pool.cpp - src/utils/time.cpp - src/utils/String.cpp - src/win32/net_win32.cpp - src/client_sample.cpp - src/eventqueueloop.cpp - src/net.cpp - src/PackageDataSink.cpp - src/sample.cpp - CMakeLists.txt - Makefile.bsd - Makefile.linux include/utils/exendian.h include/rest/HttpServer.h include/rest/HttpRequest.h include/rest/HttpResponse.h include/rest/HttpClient.h include/rest/HttpDataSink.h include/rest/HttpContext.h src/http/HttpResponse.cpp src/http/HttpRequest.cpp include/rest/HttpConnection.h src/http/HttpServer.cpp src/http/HttpConnection.cpp) - -include_directories(include) -include_directories(include/bsd) -include_directories(include/epoll) -include_directories(include/iocp) -include_directories(include/kqueue) -include_directories(include/linux) -include_directories(include/template) -include_directories(include/utils) -include_directories(include/win32) - -add_executable(net_framework ${SOURCE_FILES}) diff --git a/deps/meshy/Kakefile b/deps/meshy/Kakefile deleted file mode 100755 index 8fc29a6..0000000 --- a/deps/meshy/Kakefile +++ /dev/null @@ -1,24 +0,0 @@ -project: - name: meshy - version: 0.1.0 - type: cpp - -make: - configuration: - targets: - meshy: - type: shared_library - exclude: [ "sample.cpp", "client_sample.cpp" ] - sample: - type: executable - exclude: [ "client_sample.cpp" ] - client_sample: - type: executable - exclude: [ "sample.cpp" ] - compiler: - cpp: - flags: "-std=c++11 -fPIC" - defines: [ "OS_LINUX" ] - linker: - ld: g++ - libraries: [ "pthread" ] diff --git a/deps/meshy/Makefile b/deps/meshy/Makefile deleted file mode 100755 index 76d6cad..0000000 --- a/deps/meshy/Makefile +++ /dev/null @@ -1,156 +0,0 @@ -SRC = src -INCLUDE = include -TARGET = target -BUILD = $(TARGET)/build -CC = gcc -CXX = g++ - -CXXFLAGS = -std=c++11 -I$(INCLUDE) -DOS_LINUX -g -fPIC -LDFALGS = -lpthread - -OBJECTS = $(BUILD)/PackageDataSink.o \ - $(BUILD)/EPollConnection.o \ - $(BUILD)/EPollStream.o \ - $(BUILD)/EPollClient.o \ - $(BUILD)/EPollServer.o \ - $(BUILD)/EPollLoop.o \ - $(BUILD)/eventqueueloop.o \ - $(BUILD)/net.o \ - $(BUILD)/logger.o \ - $(BUILD)/time.o \ - $(BUILD)/thread_pool.o \ - $(BUILD)/common_utils.o \ - $(BUILD)/String.o \ - $(BUILD)/HttpContext.o \ - $(BUILD)/HttpRequest.o \ - $(BUILD)/HttpResponse.o \ - 
$(BUILD)/HttpServer.o \ - $(BUILD)/HttpConnection.o - -OBJECTS_SAMPLE = $(BUILD)/sample.o \ - $(BUILD)/PackageDataSink.o \ - $(BUILD)/EPollConnection.o \ - $(BUILD)/EPollStream.o \ - $(BUILD)/EPollClient.o \ - $(BUILD)/EPollServer.o \ - $(BUILD)/EPollLoop.o \ - $(BUILD)/eventqueueloop.o \ - $(BUILD)/net.o \ - $(BUILD)/logger.o \ - $(BUILD)/time.o \ - $(BUILD)/thread_pool.o \ - $(BUILD)/common_utils.o \ - $(BUILD)/String.o \ - $(BUILD)/HttpContext.o \ - $(BUILD)/HttpRequest.o \ - $(BUILD)/HttpResponse.o \ - $(BUILD)/HttpServer.o \ - $(BUILD)/HttpConnection.o - -OBJECTS_CLIENT = $(BUILD)/client_sample.o \ - $(BUILD)/PackageDataSink.o \ - $(BUILD)/EPollConnection.o \ - $(BUILD)/EPollStream.o \ - $(BUILD)/EPollClient.o \ - $(BUILD)/EPollServer.o \ - $(BUILD)/EPollLoop.o \ - $(BUILD)/eventqueueloop.o \ - $(BUILD)/net.o \ - $(BUILD)/logger.o \ - $(BUILD)/time.o \ - $(BUILD)/thread_pool.o \ - $(BUILD)/common_utils.o \ - $(BUILD)/String.o \ - $(BUILD)/HttpContext.o \ - $(BUILD)/HttpRequest.o \ - $(BUILD)/HttpResponse.o \ - $(BUILD)/HttpConnection.o - -all: $(TARGET)/sample $(TARGET)/client_sample - -clean: - rm -rf $(TARGET)/* - mkdir $(BUILD) - -$(TARGET)/libmeshy.so: $(OBJECTS) - $(CXX) -o $@ $(OBJECTS) $(LDFALGS) -shared - -$(TARGET)/sample: $(OBJECTS_SAMPLE) - $(CXX) -o $@ $(OBJECTS_SAMPLE) $(LDFALGS) - -$(TARGET)/client_sample: $(OBJECTS_CLIENT) - $(CXX) -o $@ $(OBJECTS_CLIENT) $(LDFALGS) - -$(BUILD)/sample.o: $(SRC)/sample.cpp $(INCLUDE)/net.h $(INCLUDE)/eventqueue.h $(INCLUDE)/eventqueueloop.h \ - $(INCLUDE)/PackageDataSink.h $(INCLUDE)/DataSink.h $(INCLUDE)/bytearray.h \ - $(INCLUDE)/rest/HttpServer.h $(INCLUDE)/rest/HttpConnection.h - $(CXX) $(CXXFLAGS) -c -o $@ $(SRC)/sample.cpp - -$(BUILD)/client_sample.o: $(SRC)/client_sample.cpp $(INCLUDE)/net.h $(INCLUDE)/eventqueue.h $(INCLUDE)/eventqueueloop.h \ - $(INCLUDE)/PackageDataSink.h $(INCLUDE)/DataSink.h $(INCLUDE)/bytearray.h - $(CXX) $(CXXFLAGS) -c -o $@ $(SRC)/client_sample.cpp - -$(BUILD)/PackageDataSink.o: $(SRC)/PackageDataSink.cpp $(INCLUDE)/PackageDataSink.h \ - $(INCLUDE)/eventqueue.h $(INCLUDE)/DataSink.h $(INCLUDE)/bytearray.h \ - $(INCLUDE)/utils/thread_pool.h - $(CXX) $(CXXFLAGS) -c -o $@ $(SRC)/PackageDataSink.cpp - -$(BUILD)/EPollServer.o: $(SRC)/epoll/EPollServer.cpp $(INCLUDE)/epoll/EPollServer.h \ - $(INCLUDE)/linux/net_linux.h \ - $(INCLUDE)/net.h - $(CXX) $(CXXFLAGS) -c -o $@ $(SRC)/epoll/EPollServer.cpp - -$(BUILD)/EPollClient.o: $(SRC)/epoll/EPollClient.cpp $(INCLUDE)/epoll/EPollClient.h - $(CXX) $(CXXFLAGS) -c -o $@ $(SRC)/epoll/EPollClient.cpp - -$(BUILD)/EPollConnection.o: $(SRC)/epoll/EPollConnection.cpp $(INCLUDE)/epoll/EPollConnection.h \ - $(INCLUDE)/linux/net_linux.h \ - $(INCLUDE)/net.h - $(CXX) $(CXXFLAGS) -c -o $@ $(SRC)/epoll/EPollConnection.cpp - -$(BUILD)/EPollStream.o: $(SRC)/epoll/EPollStream.cpp $(INCLUDE)/epoll/EPollStream.h \ - $(INCLUDE)/linux/net_linux.h \ - $(INCLUDE)/net.h - $(CXX) $(CXXFLAGS) -c -o $@ $(SRC)/epoll/EPollStream.cpp - -$(BUILD)/EPollLoop.o: $(SRC)/epoll/EPollLoop.cpp $(INCLUDE)/epoll/EPollLoop.h \ - $(INCLUDE)/loop.h $(INCLUDE)/DataSink.h - $(CXX) $(CXXFLAGS) -c -o $@ $(SRC)/epoll/EPollLoop.cpp - -$(BUILD)/eventqueueloop.o: $(SRC)/eventqueueloop.cpp $(INCLUDE)/eventqueueloop.h \ - $(INCLUDE)/eventqueue.h - $(CXX) $(CXXFLAGS) -c -o $@ $(SRC)/eventqueueloop.cpp - -$(BUILD)/net.o: $(SRC)/net.cpp $(INCLUDE)/net.h $(INCLUDE)/linux/net_linux.h $(INCLUDE)/bytearray.h - $(CXX) $(CXXFLAGS) -c -o $@ $(SRC)/net.cpp - -$(BUILD)/logger.o: $(SRC)/utils/logger.cpp $(INCLUDE)/utils/logger.h \ - 
$(INCLUDE)/utils/concurrent_queue.h - $(CXX) $(CXXFLAGS) -c -o $@ $(SRC)/utils/logger.cpp - -$(BUILD)/time.o: $(SRC)/utils/time.cpp $(INCLUDE)/utils/time.h - $(CXX) $(CXXFLAGS) -c -o $@ $(SRC)/utils/time.cpp - -$(BUILD)/thread_pool.o: $(SRC)/utils/thread_pool.cpp $(INCLUDE)/utils/thread_pool.h - $(CXX) $(CXXFLAGS) -c -o $@ $(SRC)/utils/thread_pool.cpp - -$(BUILD)/common_utils.o: $(SRC)/utils/common_utils.cpp $(INCLUDE)/utils/common_utils.h - $(CXX) $(CXXFLAGS) -c -o $@ $(SRC)/utils/common_utils.cpp - -$(BUILD)/String.o: $(SRC)/utils/String.cpp $(INCLUDE)/utils/String.h - $(CXX) $(CXXFLAGS) -c -o $@ $(SRC)/utils/String.cpp - -$(BUILD)/HttpContext.o: $(SRC)/http/HttpContext.cpp $(INCLUDE)/rest/HttpContext.h - $(CXX) $(CXXFLAGS) -c -o $@ $(SRC)/http/HttpContext.cpp - -$(BUILD)/HttpRequest.o: $(SRC)/http/HttpRequest.cpp $(INCLUDE)/rest/HttpRequest.h - $(CXX) $(CXXFLAGS) -c -o $@ $(SRC)/http/HttpRequest.cpp - -$(BUILD)/HttpResponse.o: $(SRC)/http/HttpResponse.cpp $(INCLUDE)/rest/HttpResponse.h - $(CXX) $(CXXFLAGS) -c -o $@ $(SRC)/http/HttpResponse.cpp - -$(BUILD)/HttpServer.o: $(SRC)/http/HttpServer.cpp $(INCLUDE)/rest/HttpServer.h - $(CXX) $(CXXFLAGS) -c -o $@ $(SRC)/http/HttpServer.cpp - -$(BUILD)/HttpConnection.o: $(SRC)/http/HttpConnection.cpp $(INCLUDE)/rest/HttpConnection.h - $(CXX) $(CXXFLAGS) -c -o $@ $(SRC)/http/HttpConnection.cpp diff --git a/deps/meshy/include/DataSink.h b/deps/meshy/include/DataSink.h index ea2157b..064ea3b 100755 --- a/deps/meshy/include/DataSink.h +++ b/deps/meshy/include/DataSink.h @@ -20,17 +20,19 @@ #ifndef NET_FRAME_DATASINK_H #define NET_FRAME_DATASINK_H -#include +#include "net.h" +#include +#include namespace meshy { - - class IStream; - class DataSink { public: - virtual int32_t OnDataIndication(IStream *stream, const char *buf, int64_t bytes) = 0; + virtual int32_t Write(IStream *stream, const char *buf, int64_t bytes) = 0; + IStream::DataHandler StreamDataHandler(IStream *stream) { + return std::bind(&DataSink::Write, this, stream, std::placeholders::_1, std::placeholders::_2); + } }; - } + #endif //NET_FRAME_DATASINK_H diff --git a/deps/meshy/include/IoLoop.h b/deps/meshy/include/IoLoop.h index a914c2a..cf189c4 100755 --- a/deps/meshy/include/IoLoop.h +++ b/deps/meshy/include/IoLoop.h @@ -16,15 +16,15 @@ * limitations under the license. */ -#pragma once - -const int MAX_EVENT_COUNT = 5000; -const int MAX_RECV_BUFF = 65535; - -#ifdef OS_WIN32 -#include "iocp/iocploop.h" -#define IoLoop IocpLoop -#elif defined(OS_LINUX) -#include "epoll/epollloop.h" -#define IoLoop EPollLoop -#endif +#pragma once + +const int MAX_EVENT_COUNT = 5000; +const int MAX_RECV_BUFF = 65535; + +#ifdef OS_WIN32 +#include "iocp/iocploop.h" +#define IoLoop IocpLoop +#elif defined(OS_LINUX) +#include "epoll/EPollLoop.h" +#define IoLoop EPollLoop +#endif diff --git a/deps/meshy/include/PackageDataSink.h b/deps/meshy/include/PackageDataSink.h index 506aa35..df11254 100755 --- a/deps/meshy/include/PackageDataSink.h +++ b/deps/meshy/include/PackageDataSink.h @@ -16,29 +16,29 @@ * limitations under the license. 
*/ -#pragma once - -#include "DataSink.h" -#include "bytearray.h" -#include "utils/thread_pool.h" - -namespace meshy { - class EventQueue; - - class BaseEvent; - - class PackageDataSink : public DataSink { - public: - PackageDataSink(EventQueue *eventQueue); - - ~PackageDataSink(); - - int32_t OnDataIndication(IStream *stream, const char *buf, int64_t bytes) override; - - private: - EventQueue *_eventQueue; - ThreadPool *_threadPool; - ByteArray _data; - int32_t _totalSize; - }; -} \ No newline at end of file +#pragma once + +#include "DataSink.h" +#include "bytearray.h" +#include "utils/thread_pool.h" + +namespace meshy { + class EventQueue; + + class BaseEvent; + + class PackageDataSink : public DataSink { + public: + PackageDataSink(EventQueue *eventQueue); + + ~PackageDataSink(); + + int32_t Write(IStream *stream, const char *buf, int64_t bytes) override; + + private: + EventQueue *_eventQueue; + ThreadPool *_threadPool; + ByteArray _data; + int32_t _totalSize; + }; +} diff --git a/deps/meshy/include/epoll/EPollConnection.h b/deps/meshy/include/epoll/EPollConnection.h index 90547d1..9240326 100755 --- a/deps/meshy/include/epoll/EPollConnection.h +++ b/deps/meshy/include/epoll/EPollConnection.h @@ -31,8 +31,7 @@ #include "linux/net_linux.h" #include "net.h" -#include "epoll/EpollStream.h" - +#include "epoll/EPollStream.h" namespace meshy { class EPollLoop; diff --git a/deps/meshy/include/epoll/epollloop.h b/deps/meshy/include/epoll/EPollLoop.h similarity index 100% rename from deps/meshy/include/epoll/epollloop.h rename to deps/meshy/include/epoll/EPollLoop.h diff --git a/deps/meshy/include/epoll/EPollServer.h b/deps/meshy/include/epoll/EPollServer.h index e7ec5e9..dac06df 100755 --- a/deps/meshy/include/epoll/EPollServer.h +++ b/deps/meshy/include/epoll/EPollServer.h @@ -16,39 +16,38 @@ * limitations under the license. 
*/ -#ifndef NET_FRAMEWORK_EPOLLSERVER_H -#define NET_FRAMEWORK_EPOLLSERVER_H - -#include "net.h" -#include "PackageDataSink.h" -#include "epoll/EPollConnection.h" - - -namespace meshy { - - class EPollServer : public BasicServer { - public: - EPollServer() { } - virtual ~EPollServer() { } - - int32_t Listen(const std::string& host, int32_t port, int32_t backlog = 20) override; - - void OnConnectIndication(ConnectIndicationHandler handler) { - _connectHandler = handler; - } - void OnDisconnectIndication(DisconnectIndicationHandler handler) { - _disconnectIndication = handler; - } - - EPollConnectionPtr Accept(int32_t sockfd); - - private: - int32_t _Bind(const std::string& host, int32_t port); - - DataSink* _dataSink; - ConnectIndicationHandler _connectHandler; - DisconnectIndicationHandler _disconnectIndication; - }; - -} -#endif //NET_FRAMEWORK_EPOLLSERVER_H +#ifndef NET_FRAMEWORK_EPOLLSERVER_H +#define NET_FRAMEWORK_EPOLLSERVER_H + +#include "net.h" +#include "PackageDataSink.h" +#include "epoll/EPollConnection.h" + + +namespace meshy { + class EPollServer : public BasicServer { + public: + EPollServer() { } + virtual ~EPollServer() { } + + int32_t Listen(const std::string& host, int32_t port, int32_t backlog = 20) override; + + void OnConnect(ConnectHandler handler) { + _connectHandler = handler; + } + void OnDisconnec(DisconnectHandler handler) { + _disconnectIndication = handler; + } + + EPollConnectionPtr Accept(int32_t sockfd); + + private: + int32_t _Bind(const std::string& host, int32_t port); + + DataSink* _dataSink; + ConnectHandler _connectHandler; + DisconnectHandler _disconnectIndication; + }; + +} +#endif //NET_FRAMEWORK_EPOLLSERVER_H diff --git a/deps/meshy/include/epoll/EPollStream.h b/deps/meshy/include/epoll/EPollStream.h index 998782b..e6c63b3 100755 --- a/deps/meshy/include/epoll/EPollStream.h +++ b/deps/meshy/include/epoll/EPollStream.h @@ -54,16 +54,16 @@ namespace meshy { _events = events; } - void OnDataIndication(DataIndicationHandler handler) override { + void OnData(DataHandler handler) override { _dataHandler = handler; } - DataIndicationHandler GetDataIndication() override { + DataHandler GetDataHandler() override { return _dataHandler; } private: uint32_t _events; - DataIndicationHandler _dataHandler; + DataHandler _dataHandler; }; typedef std::shared_ptr EPollStreamPtr; diff --git a/deps/meshy/include/net.h b/deps/meshy/include/net.h index 7193d90..7d09e3d 100755 --- a/deps/meshy/include/net.h +++ b/deps/meshy/include/net.h @@ -50,7 +50,7 @@ namespace meshy { #ifdef OS_WIN32 closesocket(_nativeSocket); #else - Close(_nativeSocket); + close(_nativeSocket); #endif } @@ -70,13 +70,13 @@ namespace meshy { class IStream { public: - typedef std::function DataIndicationHandler; + typedef std::function DataHandler; virtual int32_t Receive(char* buffer, int32_t bufferSize, int32_t& readSize) = 0; virtual int32_t Send(const ByteArray& byteArray) = 0; - virtual void OnDataIndication(DataIndicationHandler handler) = 0; - virtual DataIndicationHandler GetDataIndication() = 0; + virtual void OnData(DataHandler handler) = 0; + virtual DataHandler GetDataHandler() = 0; }; class IConnectable { @@ -87,14 +87,14 @@ namespace meshy { template class BasicServer : public Socket { public: - typedef std::function ConnectIndicationHandler; - typedef std::function DisconnectIndicationHandler; + typedef std::function ConnectHandler; + typedef std::function DisconnectHandler; BasicServer() { } virtual int32_t Listen(const std::string& host, int32_t port, int backlog) = 0; - virtual 
void OnConnectIndication(ConnectIndicationHandler handler) = 0; - virtual void OnDisconnectIndication(DisconnectIndicationHandler handler) = 0; + virtual void OnConnect(ConnectHandler handler) = 0; + virtual void OnDisconnec(DisconnectHandler handler) = 0; virtual ConnectionType Accept(int32_t listenfd) = 0; }; diff --git a/deps/meshy/include/rest/HttpConnection.h b/deps/meshy/include/rest/HttpConnection.h index c97858f..b55ed4c 100755 --- a/deps/meshy/include/rest/HttpConnection.h +++ b/deps/meshy/include/rest/HttpConnection.h @@ -32,7 +32,7 @@ namespace meshy { HttpConnection(TcpConnection* connection); - void HandleData(const char* buffer, int64_t size); + int HandleData(const char* buffer, int64_t size); void OnData(DataHandler dataHandler) { _dataHandler = dataHandler; diff --git a/deps/meshy/kake/Kakefile b/deps/meshy/kake/Kakefile new file mode 100644 index 0000000..c0b044f --- /dev/null +++ b/deps/meshy/kake/Kakefile @@ -0,0 +1,12 @@ +'use strict'; + +module.exports = { + name: 'AuthenCoreSDK', + version: '0.1.0', + type: 'solution', + projects: [ + 'meshy/meshy', + 'meshy/sample', + 'meshy/client-sample' + ] +}; diff --git a/deps/meshy/kake/deps.kake b/deps/meshy/kake/deps.kake new file mode 100644 index 0000000..1f8aae4 --- /dev/null +++ b/deps/meshy/kake/deps.kake @@ -0,0 +1,4 @@ +'use strict'; + +module.exports = { +}; diff --git a/deps/meshy/kake/meshy/client-sample/Kakefile b/deps/meshy/kake/meshy/client-sample/Kakefile new file mode 100644 index 0000000..2ac20e7 --- /dev/null +++ b/deps/meshy/kake/meshy/client-sample/Kakefile @@ -0,0 +1,26 @@ +'use strict'; + +module.exports = { + name: 'meshy-client-sample', + version: '0.6.1', + type: 'cpp', + target: 'executable', + targetPath: '../../../target', + compiler: { + cxx: 'g++', + cxxflags: ['-std=c++11', '-fPIC'], + defines: ['OS_LINUX'], + src: [ + '../../../src/client_sample.cpp' + ], + includePaths: [ + '../../../include' + ] + }, + linker: { + ld: 'g++', + }, + dependencies: { + meshy: 'latest' + } +}; diff --git a/deps/meshy/kake/meshy/meshy/Kakefile b/deps/meshy/kake/meshy/meshy/Kakefile new file mode 100644 index 0000000..90bb4d5 --- /dev/null +++ b/deps/meshy/kake/meshy/meshy/Kakefile @@ -0,0 +1,29 @@ +'use strict'; + +module.exports = { + name: 'meshy', + version: '0.6.1', + type: 'cpp', + target: 'dynamic_library', + targetPath: '../../../target', + compiler: { + cxx: 'g++', + cxxflags: ['-std=c++11', '-fPIC'], + defines: ['OS_LINUX'], + src: [ + '../../../src/PackageDataSink.cpp', + '../../../src/eventqueueloop.cpp', + '../../../src/net.cpp', + '../../../src/epoll/', + '../../../src/utils/', + '../../../src/http/' + ], + includePaths: [ + '../../../include' + ] + }, + linker: { + ld: 'g++', + ldflags: ['-lpthread'] + } +}; diff --git a/deps/meshy/kake/meshy/sample/Kakefile b/deps/meshy/kake/meshy/sample/Kakefile new file mode 100644 index 0000000..a6468f4 --- /dev/null +++ b/deps/meshy/kake/meshy/sample/Kakefile @@ -0,0 +1,26 @@ +'use strict'; + +module.exports = { + name: 'meshy-sample', + version: '0.6.1', + type: 'cpp', + target: 'executable', + targetPath: '../../../target', + compiler: { + cxx: 'g++', + cxxflags: ['-std=c++11', '-fPIC'], + defines: ['OS_LINUX'], + src: [ + '../../../src/sample.cpp' + ], + includePaths: [ + '../../../include' + ] + }, + linker: { + ld: 'g++', + }, + dependencies: { + meshy: 'latest' + } +}; diff --git a/deps/meshy/src/PackageDataSink.cpp b/deps/meshy/src/PackageDataSink.cpp index 95dd282..22149e3 100755 --- a/deps/meshy/src/PackageDataSink.cpp +++ 
b/deps/meshy/src/PackageDataSink.cpp @@ -53,7 +53,7 @@ namespace meshy { } } - int32_t PackageDataSink::OnDataIndication(IStream *stream, const char *buf, int64_t bytes) { + int32_t PackageDataSink::Write(IStream *stream, const char *buf, int64_t bytes) { _data.Concat(ByteArray(buf, bytes)); // The package is Complete if (_data.size() >= _totalSize) { @@ -64,7 +64,6 @@ namespace meshy { _totalSize = 0; } - return bytes; } } diff --git a/deps/meshy/src/epoll/EPollClient.cpp b/deps/meshy/src/epoll/EPollClient.cpp index 0cae70b..4c952c8 100755 --- a/deps/meshy/src/epoll/EPollClient.cpp +++ b/deps/meshy/src/epoll/EPollClient.cpp @@ -16,88 +16,88 @@ * limitations under the license. */ -#include -#include -#include -#include "epoll/EPollClient.h" -#include "utils/common_utils.h" -#include "epoll/EpollLoop.h" - -namespace meshy { - void EPollClient::Connect(const std::string& host, int port) { - struct sockaddr_in serv_addr; - - bzero((char *) &serv_addr, sizeof(serv_addr)); - serv_addr.sin_family = AF_INET; - serv_addr.sin_addr.s_addr = inet_addr(host.c_str()); - serv_addr.sin_port = htons(port); - - meshy::SetNonBlocking(GetNativeSocket()); - - connect(GetNativeSocket(), (struct sockaddr *) &serv_addr, sizeof(serv_addr)); - } - - EPollClientPtr EPollClient::Connect(const std::string &ip, int32_t port, DataSink* dataSink) { - int32_t clientSocket = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP); - - // Connect - EPollClientPtr client = EPollClientPtr(new EPollClient(clientSocket)); - client->SetDataSink(dataSink); - client->Connect(ip, port); - - // TODO: Add to epoll loop - EPollLoop *ePollLoop = EPollLoop::Get(); - - client->_events = EPOLLIN | EPOLLET; - if ( ePollLoop->AddEpollEvents(client->_events, clientSocket) == -1 ) { - perror("epoll_ctl: add"); - exit(EXIT_FAILURE); - } - - ePollLoop->AddStream(client); - - return client; - } - - int32_t EPollClient::Receive(char *buffer, int32_t bufferSize, int32_t &readSize) { - readSize = 0; - int32_t nread = 0; - NativeSocketEvent ev; - - while ((nread = read(GetNativeSocket(), buffer + readSize, bufferSize - 1)) > 0) { - readSize += nread; - } - - return nread; - } - - int32_t EPollClient::Send(const meshy::ByteArray& byteArray) { - TRACE_DEBUG("EPollConnection::Send"); - - struct epoll_event ev; - NativeSocket clientSocket = GetNativeSocket(); - - if ( EPollLoop::Get()->ModifyEpollEvents(_events | EPOLLOUT, clientSocket) ) { - // TODO: MARK ERASE - TRACE_ERROR("FATAL epoll_ctl: mod failed!"); - } - - const char *buf = byteArray.data(); - int32_t size = byteArray.size(); - int32_t n = size; - - while (n > 0) { - int32_t nwrite; - nwrite = write(clientSocket, buf + size - n, n); - if (nwrite < n) { - if (nwrite == -1 && errno != EAGAIN) { - TRACE_ERROR("FATAL write data to peer failed!"); - } - break; - } - n -= nwrite; - } - - return 0; - } -} +#include +#include +#include +#include "epoll/EPollClient.h" +#include "utils/common_utils.h" +#include "epoll/EPollLoop.h" + +namespace meshy { + void EPollClient::Connect(const std::string& host, int port) { + struct sockaddr_in serv_addr; + + bzero((char *) &serv_addr, sizeof(serv_addr)); + serv_addr.sin_family = AF_INET; + serv_addr.sin_addr.s_addr = inet_addr(host.c_str()); + serv_addr.sin_port = htons(port); + + meshy::SetNonBlocking(GetNativeSocket()); + + connect(GetNativeSocket(), (struct sockaddr *) &serv_addr, sizeof(serv_addr)); + } + + EPollClientPtr EPollClient::Connect(const std::string &ip, int32_t port, DataSink* dataSink) { + int32_t clientSocket = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP); + 
+ // Connect + EPollClientPtr client = EPollClientPtr(new EPollClient(clientSocket)); + client->SetDataSink(dataSink); + client->Connect(ip, port); + + // TODO: Add to epoll loop + EPollLoop *ePollLoop = EPollLoop::Get(); + + client->_events = EPOLLIN | EPOLLET; + if ( ePollLoop->AddEpollEvents(client->_events, clientSocket) == -1 ) { + perror("epoll_ctl: add"); + exit(EXIT_FAILURE); + } + + ePollLoop->AddStream(client); + + return client; + } + + int32_t EPollClient::Receive(char *buffer, int32_t bufferSize, int32_t &readSize) { + readSize = 0; + int32_t nread = 0; + NativeSocketEvent ev; + + while ((nread = read(GetNativeSocket(), buffer + readSize, bufferSize - 1)) > 0) { + readSize += nread; + } + + return nread; + } + + int32_t EPollClient::Send(const meshy::ByteArray& byteArray) { + TRACE_DEBUG("EPollConnection::Send"); + + struct epoll_event ev; + NativeSocket clientSocket = GetNativeSocket(); + + if ( EPollLoop::Get()->ModifyEpollEvents(_events | EPOLLOUT, clientSocket) ) { + // TODO: MARK ERASE + TRACE_ERROR("FATAL epoll_ctl: mod failed!"); + } + + const char *buf = byteArray.data(); + int32_t size = byteArray.size(); + int32_t n = size; + + while (n > 0) { + int32_t nwrite; + nwrite = write(clientSocket, buf + size - n, n); + if (nwrite < n) { + if (nwrite == -1 && errno != EAGAIN) { + TRACE_ERROR("FATAL write data to peer failed!"); + } + break; + } + n -= nwrite; + } + + return 0; + } +} diff --git a/deps/meshy/src/epoll/EPollConnection.cpp b/deps/meshy/src/epoll/EPollConnection.cpp index a3feb5a..cab4a3c 100755 --- a/deps/meshy/src/epoll/EPollConnection.cpp +++ b/deps/meshy/src/epoll/EPollConnection.cpp @@ -16,10 +16,10 @@ * limitations under the license. */ -#include "epoll/EpollLoop.h" +#include "epoll/EPollLoop.h" #include "utils/logger.h" #include #include "bytearray.h" namespace meshy { -} \ No newline at end of file +} diff --git a/deps/meshy/src/epoll/EPollServer.cpp b/deps/meshy/src/epoll/EPollServer.cpp index 6da15ec..523fb2c 100755 --- a/deps/meshy/src/epoll/EPollServer.cpp +++ b/deps/meshy/src/epoll/EPollServer.cpp @@ -16,117 +16,109 @@ * limitations under the license. 
*/ -#include "epoll/EPollServer.h" -#include "epoll/EPollLoop.h" -#include "utils/common_utils.h" -#include "utils/logger.h" -#include -#include -#include - -#ifndef DISABLE_ASSERT -#ifdef assert -#undef assert -#endif - -#define assert(x) -#endif - -namespace meshy { - int32_t EPollServer::_Bind(const std::string& host, int32_t port) { - int32_t listenfd; - if ((listenfd = socket(AF_INET, SOCK_STREAM, 0)) < 0) { - TRACE_ERROR("Create socket failed!"); - exit(1); - } - - SetNativeSocket(listenfd); - int32_t option = 1; - setsockopt(listenfd, SOL_SOCKET, SO_REUSEADDR, &option, sizeof(option)); - - // make socket non-blocking - meshy::SetNonBlocking(listenfd); - - NativeSocketAddress addr; - bzero(&addr, sizeof(addr)); - addr.sin_family = AF_INET; - addr.sin_port = htons(port); - addr.sin_addr.s_addr = inet_addr(host.c_str()); - - int32_t errorCode = bind(listenfd, (struct sockaddr *) &addr, sizeof(addr)); - if (errorCode < 0) { - TRACE_ERROR("Bind socket failed!"); - assert(0); - return errorCode; - } - - return 0; - } - - int32_t EPollServer::Listen(const std::string& host, int32_t port, int32_t backlog) { - _Bind(host, port); - - int32_t listenfd = GetNativeSocket(); - - int32_t errorCode = listen(listenfd, backlog); - if (-1 == errorCode) { - TRACE_ERROR("Listen socket failed!"); - assert(0); - return errorCode; - } - - errorCode = EPollLoop::Get()->AddEpollEvents(EPOLLIN, listenfd); - - if (errorCode == -1) { - TRACE_ERROR("FATAL epoll_ctl: listen_sock!"); - assert(0); - return errorCode; - } - -<<<<<<< HEAD -======= - this->SetDataSink(dataSink); ->>>>>>> c3c3bbe5a3163254407f40c2d102dcdc4c4383c1 - EPollLoop::Get()->AddServer(listenfd, this); - } - - EPollConnectionPtr EPollServer::Accept(int32_t sockfd) { - int32_t conn_sock; - int32_t addrlen; - int32_t remote; - - int32_t listenfd = GetNativeSocket(); - while ((conn_sock = accept(listenfd, (struct sockaddr *) &remote, - (socklen_t * ) & addrlen)) > 0) { - meshy::SetNonBlocking(conn_sock); - - NativeSocketEvent ev; - ev.events = EPOLLIN | EPOLLET; - ev.data.fd = conn_sock; - - if (epoll_ctl(sockfd, EPOLL_CTL_ADD, conn_sock, &ev) == -1) { - perror("epoll_ctl: add"); - exit(EXIT_FAILURE); - } - - EPollConnectionPtr connection = std::make_shared(conn_sock); -<<<<<<< HEAD - if ( _connectHandler ) { - _connectHandler(connection.get()); - } - -======= - connection->SetDataSink(this->GetDataSink()); ->>>>>>> c3c3bbe5a3163254407f40c2d102dcdc4c4383c1 - return connection; - } // while - - if (conn_sock == -1) { - if (errno != EAGAIN && errno != ECONNABORTED - && errno != EPROTO && errno != EINTR) - perror("accept"); - } - - return EPollConnectionPtr(nullptr); - } +#include "epoll/EPollServer.h" +#include "epoll/EPollLoop.h" +#include "utils/common_utils.h" +#include "utils/logger.h" +#include +#include +#include + +#ifndef DISABLE_ASSERT +#ifdef assert +#undef assert +#endif + +#define assert(x) +#endif + +namespace meshy { + int32_t EPollServer::_Bind(const std::string& host, int32_t port) { + int32_t listenfd; + if ((listenfd = socket(AF_INET, SOCK_STREAM, 0)) < 0) { + TRACE_ERROR("Create socket failed!"); + exit(1); + } + + SetNativeSocket(listenfd); + int32_t option = 1; + setsockopt(listenfd, SOL_SOCKET, SO_REUSEADDR, &option, sizeof(option)); + + // make socket non-blocking + meshy::SetNonBlocking(listenfd); + + NativeSocketAddress addr; + bzero(&addr, sizeof(addr)); + addr.sin_family = AF_INET; + addr.sin_port = htons(port); + addr.sin_addr.s_addr = inet_addr(host.c_str()); + + int32_t errorCode = bind(listenfd, (struct sockaddr *) 
&addr, sizeof(addr)); + if (errorCode < 0) { + TRACE_ERROR("Bind socket failed!"); + assert(0); + return errorCode; + } + + return 0; + } + + int32_t EPollServer::Listen(const std::string& host, int32_t port, int32_t backlog) { + _Bind(host, port); + + int32_t listenfd = GetNativeSocket(); + + int32_t errorCode = listen(listenfd, backlog); + if (-1 == errorCode) { + TRACE_ERROR("Listen socket failed!"); + assert(0); + return errorCode; + } + + errorCode = EPollLoop::Get()->AddEpollEvents(EPOLLIN, listenfd); + + if (errorCode == -1) { + TRACE_ERROR("FATAL epoll_ctl: listen_sock!"); + assert(0); + return errorCode; + } + + EPollLoop::Get()->AddServer(listenfd, this); + } + + EPollConnectionPtr EPollServer::Accept(int32_t sockfd) { + int32_t conn_sock = 0; + int32_t addrlen = 0; + int32_t remote = 0; + + int32_t listenfd = GetNativeSocket(); + while ((conn_sock = accept(listenfd, (struct sockaddr *) &remote, + (socklen_t * ) & addrlen)) > 0) { + meshy::SetNonBlocking(conn_sock); + + NativeSocketEvent ev; + ev.events = EPOLLIN | EPOLLET; + ev.data.fd = conn_sock; + + if (epoll_ctl(sockfd, EPOLL_CTL_ADD, conn_sock, &ev) == -1) { + perror("epoll_ctl: add"); + exit(EXIT_FAILURE); + } + + EPollConnectionPtr connection = std::make_shared(conn_sock); + if ( _connectHandler ) { + _connectHandler(connection.get()); + } + + return connection; + } // while + + if (conn_sock == -1) { + if (errno != EAGAIN && errno != ECONNABORTED + && errno != EPROTO && errno != EINTR) + perror("accept"); + } + + return EPollConnectionPtr(nullptr); + } } diff --git a/deps/meshy/src/epoll/EPollStream.cpp b/deps/meshy/src/epoll/EPollStream.cpp index e17a049..30c727c 100755 --- a/deps/meshy/src/epoll/EPollStream.cpp +++ b/deps/meshy/src/epoll/EPollStream.cpp @@ -22,7 +22,7 @@ #include "epoll/EPollStream.h" -#include "epoll/EpollLoop.h" +#include "epoll/EPollLoop.h" #include "utils/logger.h" #include #include "bytearray.h" diff --git a/deps/meshy/src/epoll/epollloop.cpp b/deps/meshy/src/epoll/EpollLoop.cpp similarity index 97% rename from deps/meshy/src/epoll/epollloop.cpp rename to deps/meshy/src/epoll/EpollLoop.cpp index 6614301..c6cb307 100755 --- a/deps/meshy/src/epoll/epollloop.cpp +++ b/deps/meshy/src/epoll/EpollLoop.cpp @@ -16,7 +16,7 @@ * limitations under the license. 
*/ -#include "epoll/EpollLoop.h" +#include "epoll/EPollLoop.h" #include "utils/logger.h" #include #include @@ -171,8 +171,8 @@ namespace meshy { { TRACE_DEBUG("_Enqueue"); - if ( stream->GetDataIndication() ) { - stream->GetDataIndication()(buf, nread); + if ( stream->GetDataHandler() ) { + stream->GetDataHandler()(buf, nread); } } diff --git a/deps/meshy/src/http/HttpConnection.cpp b/deps/meshy/src/http/HttpConnection.cpp index f4c74a0..7f124c0 100755 --- a/deps/meshy/src/http/HttpConnection.cpp +++ b/deps/meshy/src/http/HttpConnection.cpp @@ -26,10 +26,10 @@ namespace meshy { _connection(connection) { std::cout << _connection << std::endl; auto tcpDataHandler = std::bind(&HttpConnection::HandleData, this, std::placeholders::_1, std::placeholders::_2); - _connection->OnDataIndication(tcpDataHandler); + _connection->OnData(tcpDataHandler); } - void HttpConnection::HandleData(const char *buffer, int64_t size) { + int HttpConnection::HandleData(const char *buffer, int64_t size) { std::cout << buffer << std::endl; std::cout << size << std::endl; std::string requestText(buffer, size); diff --git a/deps/meshy/src/http/HttpServer.cpp b/deps/meshy/src/http/HttpServer.cpp index c1a08dc..abe1885 100755 --- a/deps/meshy/src/http/HttpServer.cpp +++ b/deps/meshy/src/http/HttpServer.cpp @@ -30,7 +30,7 @@ namespace meshy { void HttpServer::Listen(const std::string &host, int port, int backlog) { _server.Listen(host, port, backlog); - _server.OnConnectIndication([this](IStream* stream) { + _server.OnConnect([this](IStream* stream) { TcpConnection* connection = dynamic_cast(stream); HttpConnection* httpConnection = new HttpConnection(connection); if ( _connectionHandler ) { diff --git a/deps/meshy/src/sample.cpp b/deps/meshy/src/sample.cpp index 0f70fe3..4256013 100755 --- a/deps/meshy/src/sample.cpp +++ b/deps/meshy/src/sample.cpp @@ -52,40 +52,37 @@ int main() { meshy::PackageDataSink dataSink(&mainEventQueue); - meshy::HttpServer server; - server.Listen("127.0.0.1", DefaultPort); - server.OnConnection([=](meshy::HttpConnection* connection) { - connection->OnRequest([connection](const meshy::HttpRequest& request) { - std::cout << "Request arrived" << std::endl; - std::cout << request.GetMethod() << std::endl; - std::cout << request.GetPath() << std::endl; - std::cout << request.GetVersion() << std::endl; +// meshy::HttpServer server; +// server.Listen("127.0.0.1", DefaultPort); +// server.OnConnection([=](meshy::HttpConnection* connection) { +// connection->OnRequest([connection](const meshy::HttpRequest& request) { +// std::cout << "Request arrived" << std::endl; +// std::cout << request.GetMethod() << std::endl; +// std::cout << request.GetPath() << std::endl; +// std::cout << request.GetVersion() << std::endl; - meshy::HttpResponse response; - response.SetVersion("HTTP/1.1"); - response.SetStatusCode(200); - response.SetStatusMessage("OK"); - response.SetContent("Hello! Sink in Thread!"); +// meshy::HttpResponse response; +// response.SetVersion("HTTP/1.1"); +// response.SetStatusCode(200); +// response.SetStatusMessage("OK"); +// response.SetContent("Hello! 
Sink in Thread!"); - connection->SendResponse(response); - }); +// connection->SendResponse(response); +// }); - connection->OnData([connection](const std::string& data) { - std::cout << "Data arrived" << std::endl; - std::cout << data << std::endl; - }); - }); +// connection->OnData([connection](const std::string& data) { +// std::cout << "Data arrived" << std::endl; +// std::cout << data << std::endl; +// }); +// }); - //meshy::TcpServer server; + meshy::TcpServer server; - //meshy::PackageDataSink* packageDataSink = &dataSink; - //server.Listen("127.0.0.1", DefaultPort); - //server.OnConnectIndication([=](meshy::IStream* stream) { - // std::cout << stream << std::endl; - // stream->OnDataIndication([packageDataSink, stream](const char* buf, int64_t size) mutable { - // packageDataSink->OnDataIndication(stream, buf, size); - // }); - //}); + meshy::PackageDataSink* packageDataSink = &dataSink; + server.Listen("127.0.0.1", DefaultPort); + server.OnConnect([=](meshy::IStream* stream) { + stream->OnData(packageDataSink->StreamDataHandler(stream)); + }); SampleEventQueueLoop sampleQueue(&mainEventQueue); sampleQueue.Start(); diff --git a/deps/meshy/src/utils/logger.cpp b/deps/meshy/src/utils/logger.cpp index c2b9f50..d4f181f 100755 --- a/deps/meshy/src/utils/logger.cpp +++ b/deps/meshy/src/utils/logger.cpp @@ -70,7 +70,7 @@ namespace meshy { _fileStream = new std::ofstream(); std::ios_base::openmode mode = std::ios_base::out; mode |= std::ios_base::trunc; - _fileStream->Open(fileName, mode); + _fileStream->open(fileName, mode); // Error handling if (!_fileStream->is_open()) { @@ -110,4 +110,4 @@ namespace meshy { *_fileStream << log << std::endl; } } -} \ No newline at end of file +} diff --git a/deps/meshy/target/bin/linux/x64/Release/meshy-client-sample b/deps/meshy/target/bin/linux/x64/Release/meshy-client-sample new file mode 100755 index 0000000..3336ee5 Binary files /dev/null and b/deps/meshy/target/bin/linux/x64/Release/meshy-client-sample differ diff --git a/deps/meshy/target/bin/linux/x64/Release/meshy-sample b/deps/meshy/target/bin/linux/x64/Release/meshy-sample new file mode 100755 index 0000000..c8a10ab Binary files /dev/null and b/deps/meshy/target/bin/linux/x64/Release/meshy-sample differ diff --git a/deps/meshy/target/build/linux/x64/Release/Makefile b/deps/meshy/target/build/linux/x64/Release/Makefile new file mode 100644 index 0000000..644564c --- /dev/null +++ b/deps/meshy/target/build/linux/x64/Release/Makefile @@ -0,0 +1,12 @@ +all: + cd meshy;make + cd meshy-sample;make + cd meshy-client-sample;make +clean: + cd meshy;make clean + cd meshy-sample;make clean + cd meshy-client-sample;make clean +install: + cd meshy;make install + cd meshy-sample;make install + cd meshy-client-sample;make install \ No newline at end of file diff --git a/deps/kake/Changelog b/deps/meshy/target/build/linux/x64/Release/Makefile.deps old mode 100755 new mode 100644 similarity index 100% rename from deps/kake/Changelog rename to deps/meshy/target/build/linux/x64/Release/Makefile.deps diff --git a/deps/meshy/target/build/linux/x64/Release/meshy-client-sample/Makefile b/deps/meshy/target/build/linux/x64/Release/meshy-client-sample/Makefile new file mode 100644 index 0000000..d4c59f0 --- /dev/null +++ b/deps/meshy/target/build/linux/x64/Release/meshy-client-sample/Makefile @@ -0,0 +1,44 @@ +-include Makefile.config + +-include ../Makefile.deps + +OBJS := __-__-__-__-__-__-src-client__sample_cpp.o + +all: meshy-client-sample + +meshy-client-sample: $(OBJS) + $(LD) $(OBJS) -o meshy-client-sample 
-L"../meshy" -lmeshy + +install: ../../../../../bin/linux/x64/Release/meshy-client-sample + +../../../../../bin/linux/x64/Release/meshy-client-sample: meshy-client-sample + cp meshy-client-sample ../../../../../bin/linux/x64/Release + +clean: + rm -f meshy-client-sample + rm -f *.o + +__-__-__-__-__-__-src-client__sample_cpp.o: ../../../../../../src/client_sample.cpp \ + ../../../../../../include/net.h \ + ../../../../../../include/linux/net_linux.h \ + ../../../../../../include/bytearray.h \ + ../../../../../../include/utils/exendian.h \ + ../../../../../../include/eventqueue.h \ + ../../../../../../include/eventqueueloop.h \ + ../../../../../../include/loop.h ../../../../../../include/IoLoop.h \ + ../../../../../../include/epoll/EPollLoop.h \ + ../../../../../../include/epoll/EPollConnection.h \ + ../../../../../../include/linux/net_linux.h \ + ../../../../../../include/epoll/EPollStream.h \ + ../../../../../../include/epoll/EPollServer.h \ + ../../../../../../include/PackageDataSink.h \ + ../../../../../../include/DataSink.h \ + ../../../../../../include/utils/thread_pool.h \ + ../../../../../../include/utils/concurrent_queue.h \ + ../../../../../../include/template/utils/thread_pool.tcc \ + ../../../../../../include/epoll/EPollClient.h \ + ../../../../../../include/DataSink.h \ + ../../../../../../include/linux/common.h \ + ../../../../../../include/utils/logger.h + $(CXX) ../../../../../../src/client_sample.cpp -c -o $@ -I../../../../../../include -I../../../../../../include -O2 -std=c++11 -fPIC -DOS_LINUX + diff --git a/deps/meshy/target/build/linux/x64/Release/meshy-sample/Makefile b/deps/meshy/target/build/linux/x64/Release/meshy-sample/Makefile new file mode 100644 index 0000000..b4cb96b --- /dev/null +++ b/deps/meshy/target/build/linux/x64/Release/meshy-sample/Makefile @@ -0,0 +1,54 @@ +-include Makefile.config + +-include ../Makefile.deps + +OBJS := __-__-__-__-__-__-src-sample_cpp.o + +all: meshy-sample + +meshy-sample: $(OBJS) + $(LD) $(OBJS) -o meshy-sample -L"../meshy" -lmeshy + +install: ../../../../../bin/linux/x64/Release/meshy-sample + +../../../../../bin/linux/x64/Release/meshy-sample: meshy-sample + cp meshy-sample ../../../../../bin/linux/x64/Release + +clean: + rm -f meshy-sample + rm -f *.o + +__-__-__-__-__-__-src-sample_cpp.o: ../../../../../../src/sample.cpp \ + ../../../../../../include/net.h \ + ../../../../../../include/linux/net_linux.h \ + ../../../../../../include/bytearray.h \ + ../../../../../../include/utils/exendian.h \ + ../../../../../../include/eventqueue.h \ + ../../../../../../include/eventqueueloop.h \ + ../../../../../../include/loop.h ../../../../../../include/IoLoop.h \ + ../../../../../../include/epoll/EPollLoop.h \ + ../../../../../../include/epoll/EPollConnection.h \ + ../../../../../../include/linux/net_linux.h \ + ../../../../../../include/epoll/EPollStream.h \ + ../../../../../../include/epoll/EPollServer.h \ + ../../../../../../include/PackageDataSink.h \ + ../../../../../../include/DataSink.h \ + ../../../../../../include/utils/thread_pool.h \ + ../../../../../../include/utils/concurrent_queue.h \ + ../../../../../../include/template/utils/thread_pool.tcc \ + ../../../../../../include/epoll/EPollClient.h \ + ../../../../../../include/DataSink.h \ + ../../../../../../include/linux/common.h \ + ../../../../../../include/utils/logger.h \ + ../../../../../../include/Meshy.h \ + ../../../../../../include/epoll/EPollClient.h \ + ../../../../../../include/epoll/EPollConnection.h \ + ../../../../../../include/epoll/EPollServer.h \ + 
../../../../../../include/epoll/EPollStream.h \ + ../../../../../../include/rest/HttpServer.h \ + ../../../../../../include/rest/HttpConnection.h \ + ../../../../../../include/rest/HttpRequest.h \ + ../../../../../../include/rest/HttpContext.h \ + ../../../../../../include/rest/HttpResponse.h + $(CXX) ../../../../../../src/sample.cpp -c -o $@ -I../../../../../../include -I../../../../../../include -O2 -std=c++11 -fPIC -DOS_LINUX + diff --git a/deps/meshy/target/build/linux/x64/Release/meshy/Makefile b/deps/meshy/target/build/linux/x64/Release/meshy/Makefile new file mode 100644 index 0000000..ed500b4 --- /dev/null +++ b/deps/meshy/target/build/linux/x64/Release/meshy/Makefile @@ -0,0 +1,287 @@ +-include Makefile.config + +-include ../Makefile.deps + +OBJS := __-__-__-__-__-__-src-PackageDataSink_cpp.o \ +__-__-__-__-__-__-src-eventqueueloop_cpp.o \ +__-__-__-__-__-__-src-net_cpp.o \ +__-__-__-__-__-__-src-epoll-EPollClient_cpp.o \ +__-__-__-__-__-__-src-epoll-EPollConnection_cpp.o \ +__-__-__-__-__-__-src-epoll-EPollServer_cpp.o \ +__-__-__-__-__-__-src-epoll-EPollStream_cpp.o \ +__-__-__-__-__-__-src-epoll-EpollLoop_cpp.o \ +__-__-__-__-__-__-src-utils-String_cpp.o \ +__-__-__-__-__-__-src-utils-common__utils_cpp.o \ +__-__-__-__-__-__-src-utils-logger_cpp.o \ +__-__-__-__-__-__-src-utils-thread__pool_cpp.o \ +__-__-__-__-__-__-src-utils-time_cpp.o \ +__-__-__-__-__-__-src-http-HttpConnection_cpp.o \ +__-__-__-__-__-__-src-http-HttpContext_cpp.o \ +__-__-__-__-__-__-src-http-HttpRequest_cpp.o \ +__-__-__-__-__-__-src-http-HttpResponse_cpp.o \ +__-__-__-__-__-__-src-http-HttpServer_cpp.o + +all: libmeshy.so + +libmeshy.so: $(OBJS) + $(LD) $(OBJS) -shared -o libmeshy.so -lpthread + +install: ../../../../../lib/linux/x64/Release/libmeshy.so + +../../../../../lib/linux/x64/Release/libmeshy.so: libmeshy.so + cp libmeshy.so ../../../../../lib/linux/x64/Release + +clean: + rm -f libmeshy.so + rm -f *.o + +__-__-__-__-__-__-src-PackageDataSink_cpp.o: ../../../../../../src/PackageDataSink.cpp \ + ../../../../../../include/PackageDataSink.h \ + ../../../../../../include/DataSink.h ../../../../../../include/net.h \ + ../../../../../../include/linux/net_linux.h \ + ../../../../../../include/bytearray.h \ + ../../../../../../include/utils/exendian.h \ + ../../../../../../include/utils/thread_pool.h \ + ../../../../../../include/utils/concurrent_queue.h \ + ../../../../../../include/template/utils/thread_pool.tcc \ + ../../../../../../include/eventqueue.h \ + ../../../../../../include/utils/logger.h \ + ../../../../../../include/rest/HttpContext.h \ + ../../../../../../include/rest/HttpRequest.h \ + ../../../../../../include/rest/HttpResponse.h + $(CXX) ../../../../../../src/PackageDataSink.cpp -c -o $@ -I../../../../../../include -O2 -std=c++11 -fPIC -fPIC -DOS_LINUX + +__-__-__-__-__-__-src-eventqueueloop_cpp.o: ../../../../../../src/eventqueueloop.cpp \ + ../../../../../../include/eventqueueloop.h \ + ../../../../../../include/loop.h ../../../../../../include/eventqueue.h \ + ../../../../../../include/bytearray.h \ + ../../../../../../include/utils/exendian.h + $(CXX) ../../../../../../src/eventqueueloop.cpp -c -o $@ -I../../../../../../include -O2 -std=c++11 -fPIC -fPIC -DOS_LINUX + +__-__-__-__-__-__-src-net_cpp.o: ../../../../../../src/net.cpp ../../../../../../include/net.h \ + ../../../../../../include/linux/net_linux.h \ + ../../../../../../include/bytearray.h \ + ../../../../../../include/utils/exendian.h + $(CXX) ../../../../../../src/net.cpp -c -o $@ -I../../../../../../include -O2 
-std=c++11 -fPIC -fPIC -DOS_LINUX + +__-__-__-__-__-__-src-epoll-EPollClient_cpp.o: ../../../../../../src/epoll/EPollClient.cpp \ + ../../../../../../include/utils/logger.h \ + ../../../../../../include/utils/concurrent_queue.h \ + ../../../../../../include/epoll/EPollLoop.h \ + ../../../../../../include/loop.h \ + ../../../../../../include/epoll/EPollConnection.h \ + ../../../../../../include/linux/net_linux.h \ + ../../../../../../include/net.h \ + ../../../../../../include/linux/net_linux.h \ + ../../../../../../include/bytearray.h \ + ../../../../../../include/utils/exendian.h \ + ../../../../../../include/epoll/EPollStream.h \ + ../../../../../../include/epoll/EPollServer.h \ + ../../../../../../include/PackageDataSink.h \ + ../../../../../../include/DataSink.h \ + ../../../../../../include/utils/thread_pool.h \ + ../../../../../../include/template/utils/thread_pool.tcc \ + ../../../../../../include/epoll/EPollClient.h \ + ../../../../../../include/DataSink.h \ + ../../../../../../include/linux/common.h \ + ../../../../../../include/utils/common_utils.h + $(CXX) ../../../../../../src/epoll/EPollClient.cpp -c -o $@ -I../../../../../../include -O2 -std=c++11 -fPIC -fPIC -DOS_LINUX + +__-__-__-__-__-__-src-epoll-EPollConnection_cpp.o: ../../../../../../src/epoll/EPollConnection.cpp \ + ../../../../../../include/epoll/EPollLoop.h \ + ../../../../../../include/loop.h \ + ../../../../../../include/epoll/EPollConnection.h \ + ../../../../../../include/linux/net_linux.h \ + ../../../../../../include/net.h \ + ../../../../../../include/linux/net_linux.h \ + ../../../../../../include/bytearray.h \ + ../../../../../../include/utils/exendian.h \ + ../../../../../../include/epoll/EPollStream.h \ + ../../../../../../include/epoll/EPollServer.h \ + ../../../../../../include/PackageDataSink.h \ + ../../../../../../include/DataSink.h \ + ../../../../../../include/utils/thread_pool.h \ + ../../../../../../include/utils/concurrent_queue.h \ + ../../../../../../include/template/utils/thread_pool.tcc \ + ../../../../../../include/epoll/EPollClient.h \ + ../../../../../../include/DataSink.h \ + ../../../../../../include/linux/common.h \ + ../../../../../../include/utils/logger.h + $(CXX) ../../../../../../src/epoll/EPollConnection.cpp -c -o $@ -I../../../../../../include -O2 -std=c++11 -fPIC -fPIC -DOS_LINUX + +__-__-__-__-__-__-src-epoll-EPollServer_cpp.o: ../../../../../../src/epoll/EPollServer.cpp \ + ../../../../../../include/epoll/EPollServer.h \ + ../../../../../../include/net.h \ + ../../../../../../include/linux/net_linux.h \ + ../../../../../../include/bytearray.h \ + ../../../../../../include/utils/exendian.h \ + ../../../../../../include/PackageDataSink.h \ + ../../../../../../include/DataSink.h \ + ../../../../../../include/utils/thread_pool.h \ + ../../../../../../include/utils/concurrent_queue.h \ + ../../../../../../include/template/utils/thread_pool.tcc \ + ../../../../../../include/epoll/EPollConnection.h \ + ../../../../../../include/linux/net_linux.h \ + ../../../../../../include/epoll/EPollStream.h \ + ../../../../../../include/epoll/EPollLoop.h \ + ../../../../../../include/loop.h \ + ../../../../../../include/epoll/EPollClient.h \ + ../../../../../../include/DataSink.h \ + ../../../../../../include/linux/common.h \ + ../../../../../../include/utils/common_utils.h \ + ../../../../../../include/utils/logger.h + $(CXX) ../../../../../../src/epoll/EPollServer.cpp -c -o $@ -I../../../../../../include -O2 -std=c++11 -fPIC -fPIC -DOS_LINUX + +__-__-__-__-__-__-src-epoll-EPollStream_cpp.o: 
../../../../../../src/epoll/EPollStream.cpp \ + ../../../../../../include/epoll/EPollStream.h \ + ../../../../../../include/linux/net_linux.h \ + ../../../../../../include/net.h \ + ../../../../../../include/linux/net_linux.h \ + ../../../../../../include/bytearray.h \ + ../../../../../../include/utils/exendian.h \ + ../../../../../../include/epoll/EPollLoop.h \ + ../../../../../../include/loop.h \ + ../../../../../../include/epoll/EPollConnection.h \ + ../../../../../../include/epoll/EPollServer.h \ + ../../../../../../include/PackageDataSink.h \ + ../../../../../../include/DataSink.h \ + ../../../../../../include/utils/thread_pool.h \ + ../../../../../../include/utils/concurrent_queue.h \ + ../../../../../../include/template/utils/thread_pool.tcc \ + ../../../../../../include/epoll/EPollClient.h \ + ../../../../../../include/DataSink.h \ + ../../../../../../include/linux/common.h \ + ../../../../../../include/utils/logger.h + $(CXX) ../../../../../../src/epoll/EPollStream.cpp -c -o $@ -I../../../../../../include -O2 -std=c++11 -fPIC -fPIC -DOS_LINUX + +__-__-__-__-__-__-src-epoll-EpollLoop_cpp.o: ../../../../../../src/epoll/EpollLoop.cpp \ + ../../../../../../include/epoll/EPollLoop.h \ + ../../../../../../include/loop.h \ + ../../../../../../include/epoll/EPollConnection.h \ + ../../../../../../include/linux/net_linux.h \ + ../../../../../../include/net.h \ + ../../../../../../include/linux/net_linux.h \ + ../../../../../../include/bytearray.h \ + ../../../../../../include/utils/exendian.h \ + ../../../../../../include/epoll/EPollStream.h \ + ../../../../../../include/epoll/EPollServer.h \ + ../../../../../../include/PackageDataSink.h \ + ../../../../../../include/DataSink.h \ + ../../../../../../include/utils/thread_pool.h \ + ../../../../../../include/utils/concurrent_queue.h \ + ../../../../../../include/template/utils/thread_pool.tcc \ + ../../../../../../include/epoll/EPollClient.h \ + ../../../../../../include/DataSink.h \ + ../../../../../../include/linux/common.h \ + ../../../../../../include/utils/logger.h + $(CXX) ../../../../../../src/epoll/EpollLoop.cpp -c -o $@ -I../../../../../../include -O2 -std=c++11 -fPIC -fPIC -DOS_LINUX + +__-__-__-__-__-__-src-utils-String_cpp.o: ../../../../../../src/utils/String.cpp \ + ../../../../../../include/utils/String.h + $(CXX) ../../../../../../src/utils/String.cpp -c -o $@ -I../../../../../../include -O2 -std=c++11 -fPIC -fPIC -DOS_LINUX + +__-__-__-__-__-__-src-utils-common__utils_cpp.o: ../../../../../../src/utils/common_utils.cpp \ + ../../../../../../include/utils/common_utils.h + $(CXX) ../../../../../../src/utils/common_utils.cpp -c -o $@ -I../../../../../../include -O2 -std=c++11 -fPIC -fPIC -DOS_LINUX + +__-__-__-__-__-__-src-utils-logger_cpp.o: ../../../../../../src/utils/logger.cpp \ + ../../../../../../include/utils/logger.h \ + ../../../../../../include/utils/concurrent_queue.h \ + ../../../../../../include/utils/time.h + $(CXX) ../../../../../../src/utils/logger.cpp -c -o $@ -I../../../../../../include -O2 -std=c++11 -fPIC -fPIC -DOS_LINUX + +__-__-__-__-__-__-src-utils-thread__pool_cpp.o: ../../../../../../src/utils/thread_pool.cpp \ + ../../../../../../include/utils/thread_pool.h \ + ../../../../../../include/utils/concurrent_queue.h \ + ../../../../../../include/template/utils/thread_pool.tcc \ + ../../../../../../include/utils/logger.h + $(CXX) ../../../../../../src/utils/thread_pool.cpp -c -o $@ -I../../../../../../include -O2 -std=c++11 -fPIC -fPIC -DOS_LINUX + +__-__-__-__-__-__-src-utils-time_cpp.o: 
../../../../../../src/utils/time.cpp \ + ../../../../../../include/utils/time.h + $(CXX) ../../../../../../src/utils/time.cpp -c -o $@ -I../../../../../../include -O2 -std=c++11 -fPIC -fPIC -DOS_LINUX + +__-__-__-__-__-__-src-http-HttpConnection_cpp.o: ../../../../../../src/http/HttpConnection.cpp \ + ../../../../../../include/Meshy.h ../../../../../../include/IoLoop.h \ + ../../../../../../include/epoll/EPollLoop.h \ + ../../../../../../include/loop.h \ + ../../../../../../include/epoll/EPollConnection.h \ + ../../../../../../include/linux/net_linux.h \ + ../../../../../../include/net.h \ + ../../../../../../include/linux/net_linux.h \ + ../../../../../../include/bytearray.h \ + ../../../../../../include/utils/exendian.h \ + ../../../../../../include/epoll/EPollStream.h \ + ../../../../../../include/epoll/EPollServer.h \ + ../../../../../../include/PackageDataSink.h \ + ../../../../../../include/DataSink.h \ + ../../../../../../include/utils/thread_pool.h \ + ../../../../../../include/utils/concurrent_queue.h \ + ../../../../../../include/template/utils/thread_pool.tcc \ + ../../../../../../include/epoll/EPollClient.h \ + ../../../../../../include/DataSink.h \ + ../../../../../../include/linux/common.h \ + ../../../../../../include/epoll/EPollClient.h \ + ../../../../../../include/epoll/EPollConnection.h \ + ../../../../../../include/epoll/EPollServer.h \ + ../../../../../../include/epoll/EPollStream.h \ + ../../../../../../include/rest/HttpConnection.h \ + ../../../../../../include/rest/HttpRequest.h \ + ../../../../../../include/rest/HttpContext.h \ + ../../../../../../include/rest/HttpResponse.h + $(CXX) ../../../../../../src/http/HttpConnection.cpp -c -o $@ -I../../../../../../include -O2 -std=c++11 -fPIC -fPIC -DOS_LINUX + +__-__-__-__-__-__-src-http-HttpContext_cpp.o: ../../../../../../src/http/HttpContext.cpp \ + ../../../../../../include/rest/HttpContext.h \ + ../../../../../../include/bytearray.h \ + ../../../../../../include/utils/exendian.h \ + ../../../../../../include/utils/String.h + $(CXX) ../../../../../../src/http/HttpContext.cpp -c -o $@ -I../../../../../../include -O2 -std=c++11 -fPIC -fPIC -DOS_LINUX + +__-__-__-__-__-__-src-http-HttpRequest_cpp.o: ../../../../../../src/http/HttpRequest.cpp \ + ../../../../../../include/rest/HttpRequest.h \ + ../../../../../../include/rest/HttpContext.h \ + ../../../../../../include/bytearray.h \ + ../../../../../../include/utils/exendian.h \ + ../../../../../../include/utils/String.h + $(CXX) ../../../../../../src/http/HttpRequest.cpp -c -o $@ -I../../../../../../include -O2 -std=c++11 -fPIC -fPIC -DOS_LINUX + +__-__-__-__-__-__-src-http-HttpResponse_cpp.o: ../../../../../../src/http/HttpResponse.cpp \ + ../../../../../../include/rest/HttpResponse.h \ + ../../../../../../include/rest/HttpContext.h \ + ../../../../../../include/bytearray.h \ + ../../../../../../include/utils/exendian.h \ + ../../../../../../include/utils/String.h + $(CXX) ../../../../../../src/http/HttpResponse.cpp -c -o $@ -I../../../../../../include -O2 -std=c++11 -fPIC -fPIC -DOS_LINUX + +__-__-__-__-__-__-src-http-HttpServer_cpp.o: ../../../../../../src/http/HttpServer.cpp \ + ../../../../../../include/rest/HttpServer.h \ + ../../../../../../include/Meshy.h ../../../../../../include/IoLoop.h \ + ../../../../../../include/epoll/EPollLoop.h \ + ../../../../../../include/loop.h \ + ../../../../../../include/epoll/EPollConnection.h \ + ../../../../../../include/linux/net_linux.h \ + ../../../../../../include/net.h \ + ../../../../../../include/linux/net_linux.h \ + 
../../../../../../include/bytearray.h \ + ../../../../../../include/utils/exendian.h \ + ../../../../../../include/epoll/EPollStream.h \ + ../../../../../../include/epoll/EPollServer.h \ + ../../../../../../include/PackageDataSink.h \ + ../../../../../../include/DataSink.h \ + ../../../../../../include/utils/thread_pool.h \ + ../../../../../../include/utils/concurrent_queue.h \ + ../../../../../../include/template/utils/thread_pool.tcc \ + ../../../../../../include/epoll/EPollClient.h \ + ../../../../../../include/DataSink.h \ + ../../../../../../include/linux/common.h \ + ../../../../../../include/epoll/EPollClient.h \ + ../../../../../../include/epoll/EPollConnection.h \ + ../../../../../../include/epoll/EPollServer.h \ + ../../../../../../include/epoll/EPollStream.h \ + ../../../../../../include/rest/HttpConnection.h \ + ../../../../../../include/rest/HttpRequest.h \ + ../../../../../../include/rest/HttpContext.h \ + ../../../../../../include/rest/HttpResponse.h + $(CXX) ../../../../../../src/http/HttpServer.cpp -c -o $@ -I../../../../../../include -O2 -std=c++11 -fPIC -fPIC -DOS_LINUX + diff --git a/deps/meshy/target/run/linux/x64/Release/start-client.sh b/deps/meshy/target/run/linux/x64/Release/start-client.sh new file mode 100755 index 0000000..8c274ca --- /dev/null +++ b/deps/meshy/target/run/linux/x64/Release/start-client.sh @@ -0,0 +1,6 @@ +export LD_LIBRARY_PATH=./ + +cp -uv ../../../../bin/linux/x64/Release/meshy-client-sample . +cp -uv ../../../../lib/linux/x64/Release/libmeshy.so . + +./meshy-client-sample diff --git a/deps/meshy/target/run/linux/x64/Release/start-server.sh b/deps/meshy/target/run/linux/x64/Release/start-server.sh new file mode 100755 index 0000000..f3dddf5 --- /dev/null +++ b/deps/meshy/target/run/linux/x64/Release/start-server.sh @@ -0,0 +1,7 @@ +export LD_LIBRARY_PATH=./ + +cp -uv ../../../../bin/linux/x64/Release/meshy-sample . +cp -uv ../../../../bin/linux/x64/Release/meshy-client-sample . +cp -uv ../../../../lib/linux/x64/Release/libmeshy.so . + +./meshy-sample diff --git a/docs/introduction.md b/docs/introduction.md index 3eac492..242c5f8 100644 --- a/docs/introduction.md +++ b/docs/introduction.md @@ -1,441 +1,450 @@ -## What's Hurricane - -Hurricane is a C++ based open-source distributed real-time processing system. -Different from the batch processing system like Apache Hadoop, -Hurricane uses stream model to process data. It also supports multi-language interfaces, -such as Python, JavaScript, Java and Swift. - -We imitate the interface of Apache Storm and simplify it, -so the developer familiar with Storm can learn the Hurricane easily. - -## Get Started - -Before gaining an insight into Hurricane, we use an example program to illustrate the -usage of Hurricane. - -The example program is used to count the words of all text in data stream. - -At first, we need to define a `spout` to generate the data stream. -For the sake of briefness, the spout we defined only generate the text "Hello World". -So the remaining part of program will calculate the count of word "Hello" and "World". - -Before showng the code, we use a diagram to show the structure of program. 
- -###HelloWorldSpout.h - -```cpp -#pragma once - -#include "hurricane/spout/ISpout.h" - -class HelloWorldSpout : public hurricane::spout::ISpout { -public: - virtual void Prepare(std::shared_ptr outputCollector) override; - virtual void Cleanup() override; - virtual std::vector DeclareFields() override; - virtual void NextTuple() override; - -private: - std::shared_ptr _outputCollector; -}; - -``` - -The `HelloWorldSpout` is inherited from the ISpout class and override some virtual functions. -Now we explain these member functions: - -* Prepare: Hurricane will call this member function to initialize the spout task. -* Cleanup: Hurricane will call this member function to destroy the spout task. -* DeclareFields: Hurricane will call this member function to get the fields of spout. -* NextTuple: Hurricane will repeated call this member function to get the next tuple. - -Now let we have a look at the implementation of HelloWorldSpout. - -### HelloWorldSpout.cpp - -```cpp -#include "sample/wordcount/HelloWorldSpout.h" - -void HelloWorldSpout::Prepare(std::shared_ptr outputCollector) { - _outputCollector = outputCollector; -} - -void HelloWorldSpout::Cleanup() { -} - -std::vector HelloWorldSpout::DeclareFields() { - return { "sentence" }; -} - -void HelloWorldSpout::NextTuple() { - _outputCollector->Emit({ - "Hello World" - }); -} -``` - -In Prepare member function, we save the outputCollector passed by Hurricane. -We will use the output collector to emit tuples. - -In DeclareFields member function, we return a vector of string. Every element in the vector is a field name. - -In NextTuple member function, we use output collector to emit a `tuple`. -The `tuple` is an array of `Value`, which can store all the basic type variable in C++(including std::string). -The fields declared by DeclareFields is used to name the elements in tuple. - -So we know that the HelloWorldSpout will repeated Emit tuples contains string "Hello Wolrd". -It is a very simple `Spout`. - -### SplitSentenceBolt.h - -Now we have a data generator called `HelloWorldSpout`. But a system must process the data, or it is useless. - -In Hurricane, we use the `Bolt` task to process input data and generate output data. -Now we define a Bolt called `SplitSentenceBolt` to split the sentence in data stream into words. - -```cpp -#pragma once - -#include "hurricane/bolt/IBolt.h" - -class SplitSentenceBolt : public hurricane::bolt::IBolt { -public: - virtual void Prepare(std::shared_ptr outputCollector) override; - virtual void Cleanup() override; - virtual std::vector DeclareFields() override; - virtual void Execute(const hurricane::base::Tuple& tuple) override; - -private: - std::shared_ptr _outputCollector; -}; -``` - -The SplitSentenceBolt is inherited from class `IBolt`, which is the base class of all Bolts. -And the interface of Bolt is similiar to the interface of Spout. It has Prepare, Cleanup and DeclareFields. - -But the last member function of Bolt is `Execute`. -Hurrican will call this function when accept a tuple from other spouts or bolts. -We will put all data processing code int this member function. - -### SplitSentenctBolt.cpp - -Now we will show the implementation of SplitSentenctBolt. 
- -```cpp -#include "sample/wordcount/SplitSentenceBolt.h" -#include "hurricane/util/StringUtil.h" - -void SplitSentenceBolt::Prepare(std::shared_ptr outputCollector) { - _outputCollector = outputCollector; -} - -void SplitSentenceBolt::Cleanup() { -} - -std::vector SplitSentenceBolt::DeclareFields() { - return{ "word" }; -} - -void SplitSentenceBolt::Execute(const hurricane::base::Tuple& tuple) { - std::string sentence = tuple[0].ToString(); - std::vector words = SplitString(sentence, ' '); - - for ( const std::string& word : words ) { - _outputCollector->Emit({ word }); - } -} -``` - -In DeclareFields function, we return one field called word. - -In Execute function, we get the first elment of tuple and convert it to sting. -Then we call SplitSentenct to split the sentence into words. -At last, we ergodic words and emit every word as an individual tuple. - -So this Bolt will split the sentence from other task, and emit many tuples. -Every tuple contains a word. - -### WordCountBolt.h - -After get all words in the sentences, we need to count all the words. -The `WordCountBolt` class is defined to finish the counting task. - -The definition of WordCountBolt: - -```cpp -#pragma once - -#include "hurricane/bolt/IBolt.h" - -#include -#include -#include - -class WordCountBolt : public hurricane::bolt::IBolt { -public: - virtual void Prepare(std::shared_ptr outputCollector) override; - virtual void Cleanup() override; - virtual std::vector DeclareFields() override; - virtual void Execute(const hurricane::base::Tuple& tuple) override; - -private: - std::shared_ptr _outputCollector; - std::map _wordCounts; -}; -``` - -The WordCountBolt use a map to store count of words. - -### WordCountBolt.cpp - -```cpp -#include "sample/wordcount/WordCountBolt.h" -#include "hurricane/util/StringUtil.h" - -void WordCountBolt::Prepare(std::shared_ptr outputCollector) { - _outputCollector = outputCollector; -} - -void WordCountBolt::Cleanup() { -} - -std::vector WordCountBolt::DeclareFields() { - return{ "word", "count" }; -} - -void WordCountBolt::Execute(const hurricane::base::Tuple& tuple) { - std::string word = tuple[0].ToString(); - - auto wordCountIterator = _wordCounts.find(word); - if ( wordCountIterator == _wordCounts.end() ) { - _wordCounts.insert({ word, 0 }); - wordCountIterator = _wordCounts.find(word); - } - - wordCountIterator->second ++; - - _outputCollector->Emit({ word, wordCountIterator->second }); -} -``` - -In function `DeclareFields`, we define two fields. -The first field is word and the second field is the count of word. - -In function `Execute`, we get the first element of tuple and convert it into string. - -Then we find a record of the word. If record does not exist, we initialize the record with count 0. - -Then we increment the count of word and emit the word and its count. - -### WordCountTopology.cpp - -We have compelete all task components of `Word Count`. -Then we need to create a `Topology` to combine these components. 
- -```cpp -#include "sample/wordcount/WordCountTopology.h" -#include "sample/wordcount/HelloWorldSpout.h" -#include "sample/wordcount/SplitSentenceBolt.h" -#include "sample/wordcount/WordCountBolt.h" - -#include "hurricane/topology/Topology.h" - -hurricane::topology::Topology* GetTopology() { - hurricane::topology::Topology* topology = new hurricane::topology::Topology("word-count-topology"); - - topology->SetSpout("hello-world-spout", new HelloWorldSpout) - .ParallismHint(1); - - topology->SetBolt("split-sentence-bolt", new SplitSentenceBolt) - .GlobalGroup("hello-world-spout") - .ParallismHint(1); - - topology->SetBolt("word-count-bolt", new WordCountBolt) - .FieldGroup("split-sentence-bolt", "word") - .ParallismHint(2); - - return topology; -} -``` - -The function name must be `GetTopology`. - -In this function, we use the constructor of Topology to create a new Topology. -The parameter of constructor is the name of topology. - -Then we use SetSpout to append a spout to topology. -The first argument is the name of spout task, the second argument is an spout instance. -We use ParallismHint to set the paramllism of taks. - -Here we create a HelloWorldSpout instance and append it to topology. -The task name is `"hello-world-spout"`. - -Then we use SetBolt to append a bolt to topology. -The first argument is the name of bolt task, the second argument is an bolt instance. - -Here we create a SplitSentenceBolt instance and WordCountBolt instance, then append them into topology. -The task names are `"split-sentence-bolt"` and `"word-count-bolt"`. - -We use Global to stream the data from spout to one SplitSentenceBolt -and use Group to stream the data from SplitSentenceBolt to WordCountBolt. - -At last, we return the topology and build it as an dynamic library. -Then we can use Hurricane to load and execute the library. - -## The concepts of Hurricane - -After the example, you can have a rough idea of Hurricane. Now we illustrate some concepts of Hurricane. -If you have learned Apache, you must be familiar with these concepts. - -We discuss following concepts: - -* Topology -* Stream -* Tuple -* Spout -* Bolt -* Stream grouping - -### Topology -The logic for a realtime application is packaged into a Hurricane topology. -A Hurricane topology is analogous to a MapReduce job. -One key difference is that a MapReduce job eventually finishes, -whereas a topology runs forever. Of course, you can kill the topology manually. -A topology is a graph of spouts and bolts that are connected with stream groupings. -We have seen the structure diagram of example program. -You can treat the topology in example as the graph in this diagram. -These concepts are described below. - -### Stream -The stream is an important abstraction in Hurricane. -A stream is an unbounded sequence of tuples that is processed and created in parallel in a distributed fashion. -Streams are defined with a schema that names the fields in the stream's tuples. -Every stream is given an id when declared. Spout and bolt will create and specifying an id for stream automatically. - -### Tuple -Tuple is the data unit transferred in stream. -The spout and bolt need to use tuple to organize the data. -Tuples can contain integers, longs, shorts, characters, floats, doubles and strings. - -### Spout -A spout is a source of streams in a topology. -Generally spouts will read tuples from an external source and emit them into the topology. -Spouts can either be reliable or unreliable. 
-A reliable spout is capable of replaying a tuple if it failed to be processed by Hurricane, whereas an unreliable spout forgets about the tuple as soon as it is emitted. - -The main member function on spouts is NextTuple. -NextTuple either emits a new tuple into the topology or simply returns if there are no new tuples to emit. - -The other main methods on spouts are Ack and Fail. -These are called when Hurricane detects that a tuple emitted from the spout either successfully completed through the topology or failed to be completed. -Ack and Fail are only called for reliable spouts. - -### Bolt -All processing in topologies is done in bolts. -Bolts can do anything from filtering, functions, aggregations, joins, talking to databases, and more. - -Bolts can do simple stream transformations. -Doing complex stream transformations often requires multiple steps and thus multiple bolts. -For example, transforming a stream of tweets into a stream of trending images requires at least two steps: -a bolt to do a rolling count of retweets for each image, -and one or more bolts to stream out the top X images -(you can do this particular stream transformation in a more scalable way with three bolts than with two). - -The main member function in bolts is the Execute function which takes in as input a new tuple. -Bolts emit new tuples using the OutputCollector object. -Bolts must call the Ack method on the OutputCollector for every tuple they process so that Storm knows when tuples are completed -(and can eventually determine that its safe to ack the original spout tuples). -For the common case of processing an input tuple, emitting 0 or more tuples based on that tuple, and then acking the input tuple, -Hurricane provides an IBasicBolt interface which does the acking automatically. - -Its perfectly fine to launch new threads in bolts that do processing asynchronously. OutputCollector is thread-safe and can be called at any time. - -### Stream grouping -Part of defining a topology is specifying for each bolt which streams it should receive as input. -A stream grouping defines how that stream should be partitioned among the bolt's tasks. - -There are four built-in stream groupings in Hurricane: - -* Random grouping: -Tuples are randomly distributed across the bolt's tasks in a way -such that each bolt is guaranteed to get an equal number of tuples. -* Fields grouping: -The stream is partitioned by the fields specified in the grouping. -For example, if the stream is grouped by the "user-id" field, tuples with the same "user-id" will always go to the same task, but tuples with different "user-id"'s may go to different tasks. -* Global grouping: -The entire stream goes to a single one of the bolt's tasks. -Specifically, it goes to the task with the lowest id. -* Local or random grouping: -If the target bolt has one or more tasks in the same worker process, -tuples will be shuffled to just those in-process tasks. -Otherwise, this acts like a normal shuffle grouping. - -## The architecture of Hurricane -Now we illustrate the architecture of Hurricane simply. -You can gaining an insight into Hurricane throught this chapter. - -The Hurricane is a master-slave based distributed system. -The master node will control all the slave node. -If the master node is down, all system will stop, which is called `single point of failure`. -Hurricane will solve this problem in future. - -### President -President is the master node of Hurricane. A Hurricane cluster can only have one president. 
-It will start a `CommandServer` and wait for other node to connect to it.
-
-A CommandServer is a message loop based on TCP/IP protocol,
-in which you can register a command and handler.
-When a CommandClient send a command to the server,
-it will call the registered handler to process the command.
-All Command and arguments are serialized and deserialized by a simple binary protocol.
-
-After a slave node join the president, president will add it to the cluster metadata.
-
-All topology management command are sent to president.
-When you startup a topology, president will find a free executor in cluster metadata
-and send command to the slave node owned the executor.
-President will try to distribute the computing loads on all slave node to control the load-balance.
-If there is no availiable resources, president will report error.
-
-But now president can't rebalance the computing executors after detecting a manager disconnected.
-We will solve this problem in future.
-
-### Manager
-Manager is the slave node of Hurricane. A Hurricane cluster will have a lot of managers.
-
-Manager will be started and use CommandClient to send Join command to president.
-After manager receive the response of president, it will startup a CommandServer
-to receive command from other managers.
-
-Manager will send Heartbeat command to Manager in a fixed period.
-If predisent find a manager have detected the manager disconnected for some periods,
-it will destroy the connection from this manager.
-
-When receive the StartTask command, manager will create a new task instance and
-deploy it into the executor specified by president.
-
-After receive tuple from other task, manager will send the tuple to the correct executor
-and wait for next command.
-
-Now we do not consider the problem that executor maybe shutdown,
-but it will be solved in next version.
-
-### Executor
-Executor is a thread to execute task.
-
-An executor is used to execute a spout or bolt.
-It has a message loop and wait the message from manager.
-
-When SpoutExecutor receives the StartSpout message, it will initialize the spout task
-and execute NextTuple function repeated until it receive the StopSpout message.
-
-When BoltExecutor receives the StartBolt message, it will initialize the bolt task
-and wait for ExecuteBolt message. The manager will send the ExecuteBolt message to executor
-, then executor will execute the Execute function of its bolt task.
-When BoltExecutor receives the StopBolt message, it will destroy the bolt task.
-
-### Task
-Task is a spout or bolt which is executed by executor.
-
-Different from Apache Storm, a Hurricane executor only execute one task,
-because it is simple and easy to schedule.
+## What's Hurricane
+
+Hurricane is a C++ based open-source distributed real-time processing system.
+Different from batch processing systems like Apache Hadoop,
+Hurricane uses a stream model to process data. It also supports multi-language interfaces,
+such as Python, JavaScript, Java and Swift.
+
+We imitate and simplify the interface of Apache Storm,
+so developers familiar with Storm can learn Hurricane easily.
+
+## Get Started
+
+Before gaining an insight into Hurricane, we use an example program to illustrate the
+usage of Hurricane.
+
+The example program counts the words of all text in a data stream.
+
+First, we need to define a `spout` to generate the data stream.
+For brevity, the spout we define only generates the text "Hello World".
+So the remaining part of the program will calculate the counts of the words "Hello" and "World".
+
+Before showing the code, we use a diagram to show the structure of the program.
+
+![Word Count Diagram](wordcount.png)
+
+### HelloWorldSpout.h
+
+```cpp
+#pragma once
+
+#include "hurricane/spout/ISpout.h"
+
+class HelloWorldSpout : public hurricane::spout::ISpout {
+public:
+    virtual void Prepare(std::shared_ptr<hurricane::base::OutputCollector> outputCollector) override;
+    virtual void Cleanup() override;
+    virtual std::vector<std::string> DeclareFields() override;
+    virtual void NextTuple() override;
+
+private:
+    std::shared_ptr<hurricane::base::OutputCollector> _outputCollector;
+};
+
+```
+
+The `HelloWorldSpout` class inherits from the ISpout class and overrides some virtual functions.
+Now we explain these member functions:
+
+* Prepare: Hurricane will call this member function to initialize the spout task.
+* Cleanup: Hurricane will call this member function to destroy the spout task.
+* DeclareFields: Hurricane will call this member function to get the fields of the spout.
+* NextTuple: Hurricane will call this member function repeatedly to get the next tuple.
+
+Now let us have a look at the implementation of HelloWorldSpout.
+
+### HelloWorldSpout.cpp
+
+```cpp
+#include "sample/wordcount/HelloWorldSpout.h"
+
+void HelloWorldSpout::Prepare(std::shared_ptr<hurricane::base::OutputCollector> outputCollector) {
+    _outputCollector = outputCollector;
+}
+
+void HelloWorldSpout::Cleanup() {
+}
+
+std::vector<std::string> HelloWorldSpout::DeclareFields() {
+    return { "sentence" };
+}
+
+void HelloWorldSpout::NextTuple() {
+    _outputCollector->Emit({
+        "Hello World"
+    });
+}
+```
+
+In the Prepare member function, we save the outputCollector passed by Hurricane.
+We will use the output collector to emit tuples.
+
+In the DeclareFields member function, we return a vector of strings. Every element in the vector is a field name.
+
+In the NextTuple member function, we use the output collector to emit a `tuple`.
+The `tuple` is an array of `Value`, which can store all the basic types in C++ (including std::string).
+The fields declared by DeclareFields are used to name the elements in the tuple.
+
+So we know that the HelloWorldSpout will repeatedly emit tuples containing the string "Hello World".
+It is a very simple `Spout`.
+
+### SplitSentenceBolt.h
+
+Now we have a data generator called `HelloWorldSpout`. But a system must process the data, or it is useless.
+
+In Hurricane, we use the `Bolt` task to process input data and generate output data.
+Now we define a Bolt called `SplitSentenceBolt` to split the sentences in the data stream into words.
+
+```cpp
+#pragma once
+
+#include "hurricane/bolt/IBolt.h"
+
+class SplitSentenceBolt : public hurricane::bolt::IBolt {
+public:
+    virtual void Prepare(std::shared_ptr<hurricane::base::OutputCollector> outputCollector) override;
+    virtual void Cleanup() override;
+    virtual std::vector<std::string> DeclareFields() override;
+    virtual void Execute(const hurricane::base::Tuple& tuple) override;
+
+private:
+    std::shared_ptr<hurricane::base::OutputCollector> _outputCollector;
+};
+```
+
+The SplitSentenceBolt class inherits from `IBolt`, which is the base class of all Bolts.
+The interface of a Bolt is similar to the interface of a Spout: it has Prepare, Cleanup and DeclareFields.
+
+The last member function of a Bolt is `Execute`.
+Hurricane will call this function when it accepts a tuple from other spouts or bolts.
+We will put all the data processing code into this member function.
+
+### SplitSentenceBolt.cpp
+
+Now we will show the implementation of SplitSentenceBolt.
+
+```cpp
+#include "sample/wordcount/SplitSentenceBolt.h"
+#include "hurricane/util/StringUtil.h"
+
+void SplitSentenceBolt::Prepare(std::shared_ptr<hurricane::base::OutputCollector> outputCollector) {
+    _outputCollector = outputCollector;
+}
+
+void SplitSentenceBolt::Cleanup() {
+}
+
+std::vector<std::string> SplitSentenceBolt::DeclareFields() {
+    return{ "word" };
+}
+
+void SplitSentenceBolt::Execute(const hurricane::base::Tuple& tuple) {
+    std::string sentence = tuple[0].ToString();
+    std::vector<std::string> words = SplitString(sentence, ' ');
+
+    for ( const std::string& word : words ) {
+        _outputCollector->Emit({ word });
+    }
+}
+```
+
+In the DeclareFields function, we return one field called word.
+
+In the Execute function, we get the first element of the tuple and convert it to a string.
+Then we call SplitString to split the sentence into words.
+At last, we iterate over the words and emit every word as an individual tuple.
+
+So this Bolt will split the sentences from other tasks and emit many tuples.
+Every tuple contains a word.
+
+### WordCountBolt.h
+
+After getting all the words in the sentences, we need to count all the words.
+The `WordCountBolt` class is defined to finish the counting task.
+
+The definition of WordCountBolt:
+
+```cpp
+#pragma once
+
+#include "hurricane/bolt/IBolt.h"
+
+#include <cstdint>
+#include <map>
+#include <string>
+
+class WordCountBolt : public hurricane::bolt::IBolt {
+public:
+    virtual void Prepare(std::shared_ptr<hurricane::base::OutputCollector> outputCollector) override;
+    virtual void Cleanup() override;
+    virtual std::vector<std::string> DeclareFields() override;
+    virtual void Execute(const hurricane::base::Tuple& tuple) override;
+
+private:
+    std::shared_ptr<hurricane::base::OutputCollector> _outputCollector;
+    std::map<std::string, int> _wordCounts;
+};
+```
+
+The WordCountBolt uses a map to store the count of each word.
+
+### WordCountBolt.cpp
+
+```cpp
+#include "sample/wordcount/WordCountBolt.h"
+#include "hurricane/util/StringUtil.h"
+
+void WordCountBolt::Prepare(std::shared_ptr<hurricane::base::OutputCollector> outputCollector) {
+    _outputCollector = outputCollector;
+}
+
+void WordCountBolt::Cleanup() {
+}
+
+std::vector<std::string> WordCountBolt::DeclareFields() {
+    return{ "word", "count" };
+}
+
+void WordCountBolt::Execute(const hurricane::base::Tuple& tuple) {
+    std::string word = tuple[0].ToString();
+
+    auto wordCountIterator = _wordCounts.find(word);
+    if ( wordCountIterator == _wordCounts.end() ) {
+        _wordCounts.insert({ word, 0 });
+        wordCountIterator = _wordCounts.find(word);
+    }
+
+    wordCountIterator->second ++;
+
+    _outputCollector->Emit({ word, wordCountIterator->second });
+}
+```
+
+In the `DeclareFields` function, we define two fields.
+The first field is the word and the second field is the count of the word.
+
+In the `Execute` function, we get the first element of the tuple and convert it into a string.
+
+Then we look up the record of the word. If the record does not exist, we initialize the record with count 0.
+
+Then we increment the count of the word and emit the word together with its count.
+
+### WordCountTopology.cpp
+
+We have completed all the task components of `Word Count`.
+Now we need to create a `Topology` to combine these components.
+
+```cpp
+#include "sample/wordcount/WordCountTopology.h"
+#include "sample/wordcount/HelloWorldSpout.h"
+#include "sample/wordcount/SplitSentenceBolt.h"
+#include "sample/wordcount/WordCountBolt.h"
+
+#include "hurricane/topology/Topology.h"
+
+hurricane::topology::Topology* GetTopology() {
+    hurricane::topology::Topology* topology = new hurricane::topology::Topology("word-count-topology");
+
+    topology->SetSpout("hello-world-spout", new HelloWorldSpout)
+        .ParallismHint(1);
+
+    topology->SetBolt("split-sentence-bolt", new SplitSentenceBolt)
+        .GlobalGroup("hello-world-spout")
+        .ParallismHint(1);
+
+    topology->SetBolt("word-count-bolt", new WordCountBolt)
+        .FieldGroup("split-sentence-bolt", "word")
+        .ParallismHint(2);
+
+    return topology;
+}
+```
+
+The function name must be `GetTopology`.
+
+In this function, we use the constructor of Topology to create a new Topology.
+The parameter of the constructor is the name of the topology.
+
+Then we use SetSpout to append a spout to the topology.
+The first argument is the name of the spout task, and the second argument is a spout instance.
+We use ParallismHint to set the parallelism of the task.
+
+Here we create a HelloWorldSpout instance and append it to the topology.
+The task name is `"hello-world-spout"`.
+
+Then we use SetBolt to append a bolt to the topology.
+The first argument is the name of the bolt task, and the second argument is a bolt instance.
+
+Here we create a SplitSentenceBolt instance and a WordCountBolt instance, then append them to the topology.
+The task names are `"split-sentence-bolt"` and `"word-count-bolt"`.
+
+We use GlobalGroup to stream the data from the spout to one SplitSentenceBolt
+and use FieldGroup to stream the data from SplitSentenceBolt to WordCountBolt.
+
+At last, we return the topology and build the program as a dynamic library.
+Then we can use Hurricane to load and execute the library.
+
+## The concepts of Hurricane
+
+After the example, you should have a rough idea of Hurricane. Now we illustrate some concepts of Hurricane.
+If you have used Apache Storm, you will be familiar with these concepts.
+
+We discuss the following concepts:
+
+* Topology
+* Stream
+* Tuple
+* Spout
+* Bolt
+* Stream grouping
+
+### Topology
+The logic for a realtime application is packaged into a Hurricane topology.
+A Hurricane topology is analogous to a MapReduce job.
+One key difference is that a MapReduce job eventually finishes,
+whereas a topology runs forever. Of course, you can kill the topology manually.
+A topology is a graph of spouts and bolts that are connected with stream groupings.
+We have seen the structure diagram of the example program.
+You can treat the topology in the example as the graph in this diagram.
+These concepts are described below.
+
+### Stream
+The stream is an important abstraction in Hurricane.
+A stream is an unbounded sequence of tuples that is processed and created in parallel in a distributed fashion.
+Streams are defined with a schema that names the fields in the stream's tuples.
+Every stream is given an id when declared. Spouts and bolts will create and specify an id for each stream automatically.
+
+### Tuple
+A tuple is the data unit transferred in a stream.
+Spouts and bolts use tuples to organize the data.
+Tuples can contain integers, longs, shorts, characters, floats, doubles and strings.
+
+### Spout
+A spout is a source of streams in a topology.
+Generally spouts will read tuples from an external source and emit them into the topology.
+Spouts can either be reliable or unreliable.
+A reliable spout is capable of replaying a tuple if it failed to be processed by Hurricane, whereas an unreliable spout forgets about the tuple as soon as it is emitted.
+
+The main member function on spouts is NextTuple.
+NextTuple either emits a new tuple into the topology or simply returns if there are no new tuples to emit.
+
+The other main methods on spouts are Ack and Fail.
+These are called when Hurricane detects that a tuple emitted from the spout either successfully completed through the topology or failed to be completed.
+Ack and Fail are only called for reliable spouts.
+
+### Bolt
+All processing in topologies is done in bolts.
+Bolts can do anything from filtering, functions, aggregations, joins, talking to databases, and more.
+
+Bolts can do simple stream transformations.
+Doing complex stream transformations often requires multiple steps and thus multiple bolts.
+For example, transforming a stream of tweets into a stream of trending images requires at least two steps:
+a bolt to do a rolling count of retweets for each image,
+and one or more bolts to stream out the top X images
+(you can do this particular stream transformation in a more scalable way with three bolts than with two).
+
+The main member function in bolts is the Execute function, which takes a new tuple as input.
+Bolts emit new tuples using the OutputCollector object.
+Bolts must call the Ack method on the OutputCollector for every tuple they process so that Hurricane knows when tuples are completed
+(and can eventually determine that it's safe to ack the original spout tuples).
+For the common case of processing an input tuple, emitting 0 or more tuples based on that tuple, and then acking the input tuple,
+Hurricane provides an IBasicBolt interface which does the acking automatically.
+
+It's perfectly fine to launch new threads in bolts that do processing asynchronously. OutputCollector is thread-safe and can be called at any time.
+
+### Stream grouping
+Part of defining a topology is specifying for each bolt which streams it should receive as input.
+A stream grouping defines how that stream should be partitioned among the bolt's tasks.
+
+There are four built-in stream groupings in Hurricane:
+
+* Random grouping:
+Tuples are randomly distributed across the bolt's tasks in a way
+such that each bolt is guaranteed to get an equal number of tuples.
+* Fields grouping:
+The stream is partitioned by the fields specified in the grouping.
+For example, if the stream is grouped by the "user-id" field, tuples with the same "user-id" will always go to the same task, but tuples with different "user-id"'s may go to different tasks.
+* Global grouping:
+The entire stream goes to a single one of the bolt's tasks.
+Specifically, it goes to the task with the lowest id.
+* Local or random grouping:
+If the target bolt has one or more tasks in the same worker process,
+tuples will be shuffled to just those in-process tasks.
+Otherwise, this acts like a normal random grouping.
+
+## The architecture of Hurricane
+Now we briefly illustrate the architecture of Hurricane.
+You can gain an insight into Hurricane through this chapter.
+
+Hurricane is a master-slave based distributed system.
+The master node controls all the slave nodes.
+If the master node is down, the whole system will stop, which is called a `single point of failure`.
+Hurricane will solve this problem in the future.
+
+### President
+The president is the master node of Hurricane. A Hurricane cluster can have only one president.
+It will start a `CommandServer` and wait for other nodes to connect to it.
+
+A CommandServer is a message loop based on the TCP/IP protocol,
+in which you can register commands and handlers.
+When a CommandClient sends a command to the server,
+it will call the registered handler to process the command.
+All commands and arguments are serialized and deserialized by a simple binary protocol.
+
+After a slave node joins the president, the president will add it to the cluster metadata.
+
+All topology management commands are sent to the president.
+When you start a topology, the president will find a free executor in the cluster metadata
+and send a command to the slave node that owns the executor.
+The president will try to distribute the computing load across all slave nodes to keep the cluster load-balanced.
+If there are no available resources, the president will report an error.
+
+Currently the president cannot rebalance the computing executors after detecting that a manager has disconnected.
+We will solve this problem in the future.
+
+### Manager
+The manager is the slave node of Hurricane. A Hurricane cluster will have many managers.
+
+When a manager starts, it uses a CommandClient to send the Join command to the president.
+After the manager receives the response from the president, it will start a CommandServer
+to receive commands from other managers.
+
+The manager will send a Heartbeat command to the president at a fixed interval.
+If the president detects that a manager has been disconnected for several periods,
+it will destroy the connection to this manager.
+
+When it receives the StartTask command, the manager will create a new task instance and
+deploy it into the executor specified by the president.
+
+After receiving a tuple from another task, the manager will send the tuple to the correct executor
+and wait for the next command.
+
+For now we do not consider the problem that an executor may shut down,
+but it will be solved in the next version.
+
+### Executor
+An executor is a thread that executes a task.
+
+An executor is used to execute a spout or bolt.
+It has a message loop and waits for messages from the manager.
+
+When a SpoutExecutor receives the StartSpout message, it will initialize the spout task
+and execute the NextTuple function repeatedly until it receives the StopSpout message.
+
+When a BoltExecutor receives the StartBolt message, it will initialize the bolt task
+and wait for ExecuteBolt messages. The manager will send the ExecuteBolt message to the executor,
+then the executor will execute the Execute function of its bolt task.
+When a BoltExecutor receives the StopBolt message, it will destroy the bolt task.
+
+### Task
+A task is a spout or bolt which is executed by an executor.
+
+Different from Apache Storm, a Hurricane executor only executes one task,
+because this is simple and easy to schedule.
+
+### Summary
+At last, let us summarize this chapter with an architecture diagram of Hurricane.
+
+![Hurricane Architecture](architecture.png)
+
+Now we advise you to read the code of Hurricane to learn more about it.
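+For reference, the following sketch (not part of the original text) shows how a host program
+might load the word count topology library and obtain the topology through the exported
+`GetTopology` entry point, using the `HurricaneLibraryLoad` and `HurricaneLibraryGetSymbol`
+helpers added in `hurricane/base/Library.h` in this change. The library name
+`"word-count-topology"` and the cleanup logic are illustrative assumptions, not the
+definitive launcher implementation.
+
+```cpp
+#include "hurricane/base/Library.h"
+#include "hurricane/topology/Topology.h"
+
+// Signature of the entry point exported by the topology library (assumed name).
+typedef hurricane::topology::Topology* (*TopologyGetter)();
+
+int main() {
+    // Loads libword-count-topology.so on Linux (word-count-topology.dll on Windows).
+    LibraryHandle library = HurricaneLibraryLoad("word-count-topology");
+
+    // Resolve GetTopology and build the topology object.
+    TopologyGetter getTopology =
+        HurricaneLibraryGetSymbol<TopologyGetter>(library, "GetTopology");
+    hurricane::topology::Topology* topology = getTopology();
+
+    // ... hand the topology over to the president/manager runtime here ...
+
+    delete topology;
+    HurricaneLibraryFree(library);
+    return 0;
+}
+```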
\ No newline at end of file diff --git a/include/hurricane/base/BlockingQueue.h b/include/hurricane/base/BlockingQueue.h new file mode 100755 index 0000000..fe4050c --- /dev/null +++ b/include/hurricane/base/BlockingQueue.h @@ -0,0 +1,76 @@ +#pragma once + +#include +#include +#include +#include + +namespace hurricane { + namespace base { + template + class BlockingQueue { + public: + BlockingQueue() { + } + + virtual ~BlockingQueue() { + } + + void Push(const T& element) { + std::unique_lock locker(_mutex); + + _queue.push(element); + _emptyCv.notify_one(); + } + + bool Pop(T& element, int milliseconds = 0) { + std::unique_lock locker(_mutex); + + if ( !_queue.size() ) { + if ( milliseconds == 0 ) { + _emptyCv.wait(locker); + } + else { + _emptyCv.wait_for(locker, std::chrono::milliseconds(milliseconds)); + if ( !_queue.size() ) { + return false; + } + } + } + + element = _queue.front(); + _queue.pop(); + + return true; + } + + bool Peek(T& element) const { + std::unique_lock locker(_mutex); + + if ( !_queue.size() ) { + return false; + } + + element = _queue.front(); + return true; + } + + bool Empty() const { + std::unique_lock locker(_mutex); + + return _queue.size() == 0; + } + + int GetSize() const { + std::unique_lock locker(_mutex); + + return _queue.size(); + } + + private: + mutable std::mutex _mutex; + std::condition_variable _emptyCv; + std::queue _queue; + }; + } +} \ No newline at end of file diff --git a/include/hurricane/base/DataPackage.h b/include/hurricane/base/DataPackage.h index 0530d82..4586026 100755 --- a/include/hurricane/base/DataPackage.h +++ b/include/hurricane/base/DataPackage.h @@ -37,27 +37,132 @@ namespace hurricane { virtual int32_t Write(ByteArrayWriter& writer, const Variant& variant) = 0; }; - class IntWritable : public Writable { + class Int32Writable : public Writable { public: std::string GetType() const { - return "int"; + return "int32"; } int32_t Read(ByteArrayReader& reader, Variant& variant) override { int32_t intValue = reader.read(); - variant.SetIntValue(intValue); + variant.SetInt32Value(intValue); return sizeof(int32_t); } int32_t Write(ByteArrayWriter& writer, const Variant& variant) override { - int value = variant.GetIntValue(); + int32_t value = variant.GetInt32Value(); writer.write(value); return sizeof(int32_t); } }; + class Int64Writable : public Writable { + public: + std::string GetType() const { + return "int64"; + } + + int32_t Read(ByteArrayReader& reader, Variant& variant) override { + int64_t intValue = reader.read(); + variant.SetInt64Value(intValue); + + return sizeof(int64_t); + } + + int32_t Write(ByteArrayWriter& writer, const Variant& variant) override { + int64_t value = variant.GetInt64Value(); + writer.write(value); + + return sizeof(int64_t); + } + }; + + class UInt32Writable : public Writable { + public: + std::string GetType() const { + return "uint32"; + } + + int32_t Read(ByteArrayReader& reader, Variant& variant) override { + uint32_t intValue = reader.read(); + variant.SetUInt32Value(intValue); + + return sizeof(uint32_t); + } + + int32_t Write(ByteArrayWriter& writer, const Variant& variant) override { + uint32_t value = variant.GetUInt32Value(); + writer.write(value); + + return sizeof(uint32_t); + } + }; + + class UInt64Writable : public Writable { + public: + std::string GetType() const { + return "uint64"; + } + + int32_t Read(ByteArrayReader& reader, Variant& variant) override { + uint64_t intValue = reader.read(); + variant.SetUInt64Value(intValue); + + return sizeof(uint64_t); + } + + int32_t 
Write(ByteArrayWriter& writer, const Variant& variant) override { + uint64_t value = variant.GetUInt64Value(); + writer.write(value); + + return sizeof(uint64_t); + } + }; + + class BooleanWritable : public Writable { + public: + std::string GetType() const { + return "boolean"; + } + + int32_t Read(ByteArrayReader& reader, Variant& variant) override { + bool boolValue = reader.read(); + variant.SetBooleanValue(boolValue); + + return sizeof(bool); + } + + int32_t Write(ByteArrayWriter& writer, const Variant& variant) override { + bool value = variant.GetBooleanValue(); + writer.write(value); + + return sizeof(bool); + } + }; + + class FloatWritable : public Writable { + public: + std::string GetType() const { + return "float"; + } + + int32_t Read(ByteArrayReader& reader, Variant& variant) override { + float floatValue = reader.read(); + variant.SetFloatValue(floatValue); + + return sizeof(float); + } + + int32_t Write(ByteArrayWriter& writer, const Variant& variant) override { + float value = variant.GetFloatValue(); + writer.write(value); + + return sizeof(float); + } + }; + class StringWritable : public Writable { public: std::string GetType() const { diff --git a/include/hurricane/base/Library.h b/include/hurricane/base/Library.h new file mode 100644 index 0000000..2dace86 --- /dev/null +++ b/include/hurricane/base/Library.h @@ -0,0 +1,59 @@ +#pragma once + +#include +#include + +#ifdef WIN32 + +#include + +typedef HMODULE LibraryHandle; + +inline LibraryHandle HurricaneLibraryLoad(const std::string& name) { + std::string path = name + ".dll"; + return LoadLibraryA(path.c_str()); +} + +#define HurricaneLibraryFree FreeLibrary + +inline DWORD HurricaneGetLibraryError() { + return GetLastError(); +} + +#else + +#include + +typedef void* LibraryHandle; +inline LibraryHandle HurricaneLibraryLoad(const std::string& name) { + std::string path("lib"); + path += name + ".so"; + LibraryHandle handle = dlopen(path.c_str(), RTLD_NOW); + if ( !handle ) { + std::cerr << dlerror() << std::endl; + exit(EXIT_FAILURE); + } + + return handle; +} +#define HurricaneLibraryFree dlclose + +#define HurricaneGetLibraryError() dlerror() + +#endif + +#ifdef __cplusplus +#include + +template +Function HurricaneLibraryGetSymbol(LibraryHandle libraryHandle, const std::string& libraryName) { +#ifdef WIN32 + return reinterpret_cast(GetProcAddress(libraryHandle, libraryName.c_str())); +#else + return reinterpret_cast(dlsym(libraryHandle, libraryName.c_str())); +#endif +} + +std::string GetLibraryPath(); + +#endif // __cplusplus diff --git a/include/hurricane/base/NetAddress.h b/include/hurricane/base/NetAddress.h index fb19b80..276f82f 100755 --- a/include/hurricane/base/NetAddress.h +++ b/include/hurricane/base/NetAddress.h @@ -19,13 +19,14 @@ #pragma once #include +#include "hurricane/base/Variant.h" namespace hurricane { namespace base { - class NetAddress { + class NetAddress : public Serializable { public: - NetAddress() : _port(0) { - } + NetAddress() : _port(0) { + } NetAddress(const std::string& host, int port) : _host(host), _port(port) { } @@ -46,6 +47,16 @@ namespace base { _port = port; } + virtual void Serialize(Variants& variants) const override { + Variant::Serialize(variants, _host); + Variant::Serialize(variants, _port); + } + + virtual void Deserialize(Variants::const_iterator& it) override { + Variant::Deserialize(it, _host); + Variant::Deserialize(it, _port); + } + private: std::string _host; int _port; diff --git a/include/hurricane/base/Serializer.h b/include/hurricane/base/Serializer.h new file 
mode 100644 index 0000000..171be0c --- /dev/null +++ b/include/hurricane/base/Serializer.h @@ -0,0 +1,13 @@ +#pragma once + +#include "hurricane/base/Variant.h" + +namespace hurricane { +namespace base { + +class Seiralizer { + +}; + +} +} diff --git a/include/hurricane/base/Values.h b/include/hurricane/base/Values.h index 4ded4f8..0e2e3fe 100755 --- a/include/hurricane/base/Values.h +++ b/include/hurricane/base/Values.h @@ -24,147 +24,11 @@ #include #include #include +#include "hurricane/base/Variant.h" namespace hurricane { namespace base { - - class TypeMismatchException : std::exception { - public: - TypeMismatchException(const std::string& message) : - _message(message) {} - - - const char* what() const noexcept override { - return _message.c_str(); - } - - private: - std::string _message; - }; - - class Value { - public: - enum class Type { - Boolean, - Character, - Int8, - Int16, - Int32, - Int64, - Float, - Double, - String - }; - - union InnerValue { - bool booleanValue; - char characterValue; - int8_t int8Value; - int16_t int16Value; - int32_t int32Value; - int64_t int64Value; - float floatValue; - double doubleValue; - }; - - Value(bool value) : _type(Type::Boolean) { - _value.booleanValue = value; - } - - Value(char value) : _type(Type::Character) { - _value.characterValue = value; - } - - Value(int8_t value) : _type(Type::Int8) { - _value.int8Value = value; - } - - Value(int16_t value) : _type(Type::Int16) { - _value.int16Value = value; - } - - Value(int32_t value) : _type(Type::Int32) { - _value.int32Value = value; - } - - Value(int64_t value) : _type(Type::Int64) { - _value.int64Value = value; - } - - Value(float value) : _type(Type::Float) { - _value.floatValue = value; - } - - Value(double value) : _type(Type::Double) { - _value.doubleValue = value; - } - - Value(const std::string& value) : _type(Type::String) { - _stringValue = value; - } - - Value(const char* value) : Value(std::string(value)) { - } - - bool ToBoolean() const { - if ( _type != Type::Boolean ) { - throw TypeMismatchException("The type of value is not boolean"); - } - } - - int8_t ToInt8() const { - if ( _type != Type::Int8 ) { - throw TypeMismatchException("The type of value is not int8"); - } - - return _value.int8Value; - } - - int16_t ToInt16() const { - if ( _type != Type::Int16 ) { - throw TypeMismatchException("The type of value is not int16"); - } - - return _value.int16Value; - } - - int32_t ToInt32() const { - if ( _type != Type::Int32 ) { - throw TypeMismatchException("The type of value is not int32"); - } - - return _value.int32Value; - } - - int64_t ToInt64() const { - if ( _type != Type::Int64 ) { - throw TypeMismatchException("The type of value is not int64"); - } - - return _value.int64Value; - } - - char ToCharacter() const { - if ( _type != Type::Character ) { - throw TypeMismatchException("The type of value is not character"); - } - - return _value.characterValue; - } - - const std::string& ToString() const { - if ( _type != Type::String ) { - throw TypeMismatchException("The type of value is not string"); - } - - return _stringValue; - } - - private: - Type _type; - InnerValue _value; - std::string _stringValue; - }; + typedef Variant Value; class Values : public std::vector { public: @@ -181,7 +45,7 @@ namespace hurricane { } }; - class Tuple { + class Tuple : public base::Serializable { public: Tuple() = default; Tuple(std::initializer_list values) : _values(values) { @@ -195,6 +59,14 @@ namespace hurricane { return _values[index]; } + Value& operator[](const std::string& fieldName) { 
+ return _values[_fieldsMap->at(fieldName)]; + } + + const Value& operator[](const std::string& fieldName) const { + return _values[_fieldsMap->at(fieldName)]; + } + int GetSize() const { return _values.size(); } @@ -203,8 +75,47 @@ namespace hurricane { _values.push_back(value); } + void SetSourceTask(const std::string& sourceTask) { + _sourceTask = sourceTask; + } + + const std::string& GetSourceTask() const { + return _sourceTask; + } + + void SetDestTask(const std::string& destTask) { + _destTask = destTask; + } + + const std::string& GetDestTask() const { + return _destTask; + } + + const Values& GetValues() const { + return _values; + } + + void SetFields(const std::vector* fields) { + _fields = fields; + } + + const std::vector* GetFields() const { + return _fields; + } + + void SetFieldsMap(const std::map* fieldsMap) { + _fieldsMap = fieldsMap; + } + + void Serialize(Variants& variants) const override; + void Deserialize(Variants::const_iterator& it) override; + private: - Values _values; - }; + std::string _sourceTask; + std::string _destTask; + Values _values; + const std::vector* _fields; + const std::map* _fieldsMap; + }; } } diff --git a/include/hurricane/base/Variant.h b/include/hurricane/base/Variant.h index 3e88078..403a4f9 100755 --- a/include/hurricane/base/Variant.h +++ b/include/hurricane/base/Variant.h @@ -24,17 +24,42 @@ #include #include #include +#include +#include + + +#define VARIANT_GETTER(TypeName, CType, valueName) \ + CType Get##TypeName##Value() const { \ + if ( _type == Type::Invalid ) { \ + std::cerr << "Invalid"; \ + } \ + \ + if ( _type == Type::##TypeName ) { \ + return valueName; \ + } \ + \ + throw "Type mismatched"; \ + } namespace hurricane { namespace base { class Variant; typedef std::vector Variants; + class Serializable { + public: + virtual void Serialize(Variants& variants) const = 0; + virtual void Deserialize(Variants::const_iterator& it) = 0; + }; + class Variant { public: enum class Type { Invalid, - Integer, + Int32, + Int64, + UInt32, + UInt64, Boolean, Float, String, @@ -44,14 +69,36 @@ namespace hurricane { static std::map TypeNames; Variant() : _type(Type::Invalid) {} - Variant(int32_t intValue) : _type(Type::Integer), _intValue(intValue) {} + Variant(int32_t intValue) : _type(Type::Int32), _int32Value(intValue) {} + Variant(int64_t longValue) : _type(Type::Int64), _int64Value(longValue) {} + Variant(uint32_t intValue) : _type(Type::UInt32), _uint32Value(intValue) {} + Variant(uint64_t longValue) : _type(Type::UInt64), _uint64Value(longValue) {} + Variant(bool boolValue) : _type(Type::Boolean), _boolValue(boolValue) {} + Variant(float floatValue) : _type(Type::Float), _floatValue(floatValue) {} Variant(const std::string& stringValue) : _type(Type::String), _stringValue(stringValue) { } + Variant(const char* stringValue) : _type(Type::String), _stringValue(stringValue) { + } ~Variant() {} Variant(const Variant& variant) : _type(variant._type) { - if ( _type == Type::Integer ) { - _intValue = variant._intValue; + if ( _type == Type::Int32 ) { + _int32Value = variant._int32Value; + } + else if ( _type == Type::Int64 ) { + _int64Value = variant._int64Value; + } + else if ( _type == Type::UInt32 ) { + _uint32Value = variant._uint32Value; + } + else if ( _type == Type::UInt64 ) { + _uint64Value = variant._uint64Value; + } + else if ( _type == Type::Boolean ) { + _boolValue = variant._boolValue; + } + else if ( _type == Type::Float ) { + _floatValue = variant._floatValue; } else if ( _type == Type::String ) { _stringValue = 
variant._stringValue; @@ -60,8 +107,23 @@ namespace hurricane { const Variant& operator=(const Variant& variant) { _type = variant._type; - if ( _type == Type::Integer ) { - _intValue = variant._intValue; + if ( _type == Type::Int32 ) { + _int32Value = variant._int32Value; + } + else if ( _type == Type::Int64 ) { + _int64Value = variant._int64Value; + } + else if ( _type == Type::UInt32 ) { + _uint32Value = variant._uint32Value; + } + else if ( _type == Type::UInt64 ) { + _uint64Value = variant._uint64Value; + } + else if ( _type == Type::Boolean ) { + _boolValue = variant._boolValue; + } + else if ( _type == Type::Float ) { + _floatValue = variant._floatValue; } else if ( _type == Type::String ) { _stringValue = variant._stringValue; @@ -79,21 +141,124 @@ namespace hurricane { throw "Not Implemented"; } - int32_t GetIntValue() const { + int32_t GetInt32Value() const { if ( _type == Type::Invalid ) { std::cerr << "Invalid"; } - if ( _type == Type::Integer ) { - return _intValue; + if ( _type == Type::Int32 ) { + return _int32Value; } + std::cout << "Type mismatched. " << + "Expected: " << TypeNames[Type::Int32] << + ". Actually: " << TypeNames[_type] << std::endl; throw "Type mismatched"; } - void SetIntValue(int32_t value) { - _type = Type::Integer; - _intValue = value; + void SetInt32Value(int32_t value) { + _type = Type::Int32; + _int32Value = value; + } + + int64_t GetInt64Value() const { + if ( _type == Type::Invalid ) { + std::cerr << "Invalid"; + } + + if ( _type == Type::Int64 ) { + return _int64Value; + } + + std::cout << "Type mismatched. " << + "Expected: " << TypeNames[Type::Int64] << + ". Actually: " << TypeNames[_type] << std::endl; + throw "Type mismatched"; + } + + void SetInt64Value(int64_t value) { + _type = Type::Int64; + _int64Value = value; + } + + uint32_t GetUInt32Value() const { + if ( _type == Type::Invalid ) { + std::cerr << "Invalid"; + } + + if ( _type == Type::UInt32 ) { + return _uint32Value; + } + + std::cout << "Type mismatched. " << + "Expected: " << TypeNames[Type::UInt32] << + ". Actually: " << TypeNames[_type] << std::endl; + throw "Type mismatched"; + } + + void SetUInt32Value(uint32_t value) { + _type = Type::UInt32; + _uint32Value = value; + } + + uint64_t GetUInt64Value() const { + if ( _type == Type::Invalid ) { + std::cerr << "Invalid"; + } + + if ( _type == Type::UInt64 ) { + return _uint64Value; + } + + std::cout << "Type mismatched. " << + "Expected: " << TypeNames[Type::UInt64] << + ". Actually: " << TypeNames[_type] << std::endl; + throw "Type mismatched"; + } + + void SetUInt64Value(uint64_t value) { + _type = Type::UInt64; + _uint64Value = value; + } + + bool GetBooleanValue() const { + if ( _type == Type::Invalid ) { + std::cerr << "Invalid"; + } + + if ( _type == Type::Boolean ) { + return _boolValue; + } + + std::cout << "Type mismatched. " << + "Expected: " << TypeNames[Type::Boolean] << + ". Actually: " << TypeNames[_type] << std::endl; + throw "Type mismatched"; + } + + void SetBooleanValue(bool value) { + _type = Type::Boolean; + _boolValue = value; + } + + float GetFloatValue() const { + if ( _type == Type::Invalid ) { + std::cerr << "Invalid"; + } + + if ( _type == Type::Float ) { + return _floatValue; + } + + std::cout << "Type mismatched. " << + "Expected: " << TypeNames[Type::Float] << + ". 
Actually: " << TypeNames[_type] << std::endl; + throw "Type mismatched"; + } + + void SetFloatValue(float value) { + _type = Type::Float; + _floatValue = value; } std::string GetStringValue() const { @@ -105,6 +270,9 @@ namespace hurricane { return _stringValue; } + std::cout << "Type mismatched. " << + "Expected: " << TypeNames[Type::String] << + ". Actually: " << TypeNames[_type] << std::endl; throw "Type mismatched"; } @@ -113,51 +281,168 @@ namespace hurricane { _stringValue = value; } + static void Deserialize(Variants::const_iterator& it, Variant& value) { + value = *it; + it ++; + } + + static void Deserialize(Variants::const_iterator& it, int32_t& value) { + value = it->GetInt32Value(); + it ++; + } + + static void Deserialize(Variants::const_iterator& it, int64_t& value) { + value = it->GetInt64Value(); + it ++; + } + + static void Deserialize(Variants::const_iterator& it, uint32_t& value) { + value = it->GetUInt32Value(); + it ++; + } + + static void Deserialize(Variants::const_iterator& it, uint64_t& value) { + value = it->GetUInt64Value(); + it ++; + } + + static void Deserialize(Variants::const_iterator& it, std::string& value) { + value = it->GetStringValue(); + it ++; + } + + static void Deserialize(Variants::const_iterator& it, Serializable& value) { + value.Deserialize(it); + } + template - static Variants FromStdSet(std::set originSet) { - Variants variants; - variants.push_back({ static_cast(originSet.size()) }); - for ( const Element& element : originSet ) { - variants.push_back({ element }); + static void Deserialize(Variants::const_iterator& it, std::vector& values) { + size_t size = 0; + Deserialize(it, size); + + values.resize(size); + for ( Element& value : values ) { + Deserialize(it, value); } + } - return variants; + template + static void Deserialize(Variants::const_iterator& it, std::list& values) { + size_t size = 0; + Deserialize(it, size); + + values.resize(size); + for ( Element& value : values ) { + Deserialize(it, value); + } } template - static std::set ToStdSet(const Variants& variants) { - std::set resultSet; + static void Deserialize(Variants::const_iterator& it, std::set& values) { + size_t size = 0; + Deserialize(it, size); + + values.clear(); + for ( int i = 0; i != size; ++ i ) { + Element value; + Deserialize(it, value); + + values.insert(value); + } + } + + template + static void Deserialize(Variants::const_iterator& it, std::map& values) { + size_t size = 0; + Deserialize(it, size); + + for ( int i = 0; i != size; ++ i ) { + Key key; + Deserialize(it, key); + + Element value; + Deserialize(it, value); + + values.insert({ key, value }); + } + } - bool isFirstValue = true; - for ( const Variant& variant : variants ) { - if ( isFirstValue ) { - isFirstValue = false; - continue; - } + static void Serialize(Variants& variants, const Variant& value) { + variants.push_back(value); + } - resultSet.insert(variant.GetValue()); + static void Serialize(Variants& variants, int32_t value) { + variants.push_back(Variant(value)); + } + + static void Serialize(Variants& variants, int64_t value) { + variants.push_back(Variant(value)); + } + + static void Serialize(Variants& variants, uint32_t value) { + variants.push_back(Variant(value)); + } + + static void Serialize(Variants& variants, uint64_t value) { + variants.push_back(Variant(value)); + } + + static void Serialize(Variants& variants, bool value) { + variants.push_back(Variant(value)); + } + + static void Serialize(Variants& variants, float value) { + variants.push_back(Variant(value)); + } + + static 
void Serialize(Variants& variants, const std::string& value) { + variants.push_back(Variant(value)); + } + + template + static void Serialize(Variants& variants, std::vector values) { + variants.push_back(Variant(values.size())); + for ( const Element& value : values ) { + Serialize(variants, value); } + } - return resultSet; + template + static void Serialize(Variants& variants, std::list values) { + variants.push_back(Variant(values.size())); + for ( const Element& value : values ) { + Serialize(variants, value); + } } template - static std::set ToStdSet(Variants::const_iterator beginIt, Variants::const_iterator endIt) { - std::set resultSet; + static void Serialize(Variants& variants, std::set values) { + variants.push_back(Variant(values.size())); + for ( const Element& value : values ) { + Serialize(variants, value); + } + } - for ( Variants::const_iterator it = beginIt; - it != endIt; - ++ it ) { - resultSet.insert(it->GetValue()); + template + static void Serialize(Variants& variants, std::map values) { + variants.push_back(Variant(values.size())); + for ( const std::pair& value : values ) { + Serialize(variants, value.first); + Serialize(variants, value.second); } + } - return resultSet; + static void Serialize(Variants& variants, const Serializable& object) { + object.Serialize(variants); } private: Type _type; union { - int32_t _intValue; + int32_t _int32Value; + int64_t _int64Value; + uint32_t _uint32Value; + uint64_t _uint64Value; bool _boolValue; float _floatValue; }; @@ -165,7 +450,27 @@ namespace hurricane { }; template<> inline int32_t Variant::GetValue() const { - return GetIntValue(); + return GetInt32Value(); + } + + template<> inline int64_t Variant::GetValue() const { + return GetInt64Value(); + } + + template<> inline uint32_t Variant::GetValue() const { + return GetUInt32Value(); + } + + template<> inline uint64_t Variant::GetValue() const { + return GetUInt64Value(); + } + + template<> inline bool Variant::GetValue() const { + return GetBooleanValue(); + } + + template<> inline float Variant::GetValue() const { + return GetFloatValue(); } template<> inline std::string Variant::GetValue() const { diff --git a/include/hurricane/base/externc.h b/include/hurricane/base/externc.h new file mode 100644 index 0000000..5b26ea1 --- /dev/null +++ b/include/hurricane/base/externc.h @@ -0,0 +1,8 @@ +#pragma once +#ifdef __cplusplus +#define BEGIN_EXTERN_C extern "C" { +#define END_EXTERN_C } +#else +#define BEGIN_EXTERN_C +#define END_EXTERN_C +#endif diff --git a/include/hurricane/bolt/BoltDeclarer.h b/include/hurricane/bolt/BoltDeclarer.h index 02682b0..247efec 100755 --- a/include/hurricane/bolt/BoltDeclarer.h +++ b/include/hurricane/bolt/BoltDeclarer.h @@ -3,6 +3,8 @@ #include "hurricane/task/TaskDeclarer.h" #include #include +#include +#include namespace hurricane { namespace bolt { @@ -24,16 +26,9 @@ namespace hurricane { SetGroupMethod(hurricane::task::TaskDeclarer::GroupMethod::Global); return *this; - } - - BoltDeclarer& Random(const std::string& sourceTaskName) { - SetSourceTaskName(sourceTaskName); - SetGroupMethod(hurricane::task::TaskDeclarer::GroupMethod::Random); - - return *this; - } + } - BoltDeclarer& Group(const std::string& sourceTaskName, const std::string& groupField) { + BoltDeclarer& Field(const std::string& sourceTaskName, const std::string& groupField) { SetSourceTaskName(sourceTaskName); SetGroupMethod(hurricane::task::TaskDeclarer::GroupMethod::Field); SetGroupField(groupField); @@ -41,6 +36,13 @@ namespace hurricane { return *this; } + BoltDeclarer& 
Random(const std::string& sourceTaskName) { + SetSourceTaskName(sourceTaskName); + SetGroupMethod(hurricane::task::TaskDeclarer::GroupMethod::Random); + + return *this; + } + const std::string& GetGroupField() const { return _groupField; } @@ -48,9 +50,24 @@ namespace hurricane { void SetGroupField(const std::string& groupField) { _groupField = groupField; } + + std::shared_ptr GetBolt() const { + return _bolt; + } + + const std::vector& GetFields() const { + return _fields; + } + + const std::map& GetFieldsMap() const { + return _fieldsMap; + } + private: std::shared_ptr _bolt; std::string _groupField; + std::vector _fields; + std::map _fieldsMap; }; } -} \ No newline at end of file +} diff --git a/include/hurricane/collector/OutputCollector.h b/include/hurricane/collector/OutputCollector.h index 5e664f1..0be1c6d 100755 --- a/include/hurricane/collector/OutputCollector.h +++ b/include/hurricane/collector/OutputCollector.h @@ -1,14 +1,37 @@ #pragma once +#include + namespace hurricane { - namespace base { - class Tuple; - } - - namespace collector { - class OutputCollector { - public: - void Emit(const hurricane::base::Tuple& tuple); - }; - } -} \ No newline at end of file +namespace base { + class Tuple; +} + +namespace collector { + class OutputQueue; + + class OutputCollector { + public: + OutputCollector() : _taskIndex(-1) {} + OutputCollector(int taskIndex, const std::string& taskName, std::shared_ptr queue) : + _taskIndex(taskIndex), _taskName(taskName), _queue(queue) { + } + + void SetQueue(std::shared_ptr queue) { + _queue = queue; + } + + std::shared_ptr GetQueue() const { + return _queue; + } + + void Emit(const hurricane::base::Tuple& tuple); + + private: + int _taskIndex; + std::string _taskName; + std::shared_ptr _queue; + }; + +} +} diff --git a/include/hurricane/collector/OutputDispatcher.h b/include/hurricane/collector/OutputDispatcher.h new file mode 100644 index 0000000..0bba549 --- /dev/null +++ b/include/hurricane/collector/OutputDispatcher.h @@ -0,0 +1,99 @@ +#pragma once + +#include "hurricane/task/TaskInfo.h" +#include "hurricane/base/NetAddress.h" +#include +#include +#include +#include + +namespace hurricane { + +namespace message { +class CommandClient; +} + +namespace collector { + +class OutputQueue; +class OutputItem; +class TaskQueue; + +typedef std::pair TaskPathName; + +class DispatchTaskInfo { +private: + int _taskType; + std::vector _globalPaths; + std::vector> _randomPaths; + std::vector>> _fieldPaths; +}; + +class OutputDispatcher { +public: + typedef std::function AskFieldCallback; + + OutputDispatcher() : _selfSpoutCount(0) { + } + + OutputDispatcher(std::shared_ptr queue) : _queue(queue), _selfSpoutCount(0) { + } + + void SetQueue(std::shared_ptr queue) { + _queue = queue; + } + + std::shared_ptr GetQueue() const { + return _queue; + } + + void SetSelfAddress(const base::NetAddress& selfAddress) { + _selfAddress = selfAddress; + } + + void SetSelfSpoutCount(int selfSpoutCount) { + _selfSpoutCount = selfSpoutCount; + } + + void SetSelfTasks(std::vector> selfTasks) { + _selfTasks = selfTasks; + } + + void SetTaskInfos(const std::vector& taskInfos); + void SetNimbusClient(message::CommandClient* nimbusClient); + + void Start(); + + void SetTaskFields(const std::map*> & taskFields) { + _taskFields = taskFields; + } + + void SetTaskFieldsMap(const std::map*>& taskFieldsMap) { + _taskFieldsMap = taskFieldsMap; + } + +private: + void MainThread(); + bool ProcessPath(const task::TaskInfo& taskInfo, const task::PathInfo& path, + OutputItem* outputItem); + void 
SendTupleTo(OutputItem* outputItem, const task::ExecutorPosition& executorPosition); + void AskField(TaskPathName taskPathName, const std::string& fieldValue, AskFieldCallback callback); + +private: + std::shared_ptr _queue; + std::thread _thread; + base::NetAddress _selfAddress; + int _selfSpoutCount; + + std::vector _taskInfos; + + std::vector> _selfTasks; + std::shared_ptr _nimbusClient; + std::map _commandClients; + std::map> _fieldsDestinations; + std::map*> _taskFields; + std::map*> _taskFieldsMap; +}; + +} +} diff --git a/include/hurricane/collector/OutputQueue.h b/include/hurricane/collector/OutputQueue.h new file mode 100644 index 0000000..5efbc02 --- /dev/null +++ b/include/hurricane/collector/OutputQueue.h @@ -0,0 +1,37 @@ +#pragma once + +#include "hurricane/base/BlockingQueue.h" +#include "hurricane/base/Values.h" + +namespace hurricane { +namespace collector { + +class OutputItem { +public: + OutputItem(int taskIndex, const base::Tuple& tuple, const std::string& taskName) : + _taskIndex(taskIndex), _tuple(tuple) { + _tuple.SetSourceTask(taskName); + } + + int GetTaskIndex() const { + return _taskIndex; + } + + base::Tuple& GetTuple() { + return _tuple; + } + + const base::Tuple& GetTuple() const { + return _tuple; + } + +private: + int _taskIndex; + base::Tuple _tuple; +}; + +class OutputQueue : public base::BlockingQueue { +}; + +} +} diff --git a/include/hurricane/collector/TaskQueue.h b/include/hurricane/collector/TaskQueue.h new file mode 100644 index 0000000..bf84b2c --- /dev/null +++ b/include/hurricane/collector/TaskQueue.h @@ -0,0 +1,32 @@ +#pragma once + +#include "hurricane/base/BlockingQueue.h" +#include "hurricane/base/Values.h" + +namespace hurricane { +namespace collector { + +class TaskItem { +public: + TaskItem(int taskIndex, const base::Tuple& tuple) : + _taskIndex(taskIndex), _tuple(tuple) { + } + + int GetTaskIndex() const { + return _taskIndex; + } + + const base::Tuple& GetTuple() const { + return _tuple; + } + +private: + int _taskIndex; + base::Tuple _tuple; +}; + +class TaskQueue : public base::BlockingQueue { +}; + +} +} diff --git a/include/hurricane/message/Command.h b/include/hurricane/message/Command.h index 8048531..5dedbb9 100755 --- a/include/hurricane/message/Command.h +++ b/include/hurricane/message/Command.h @@ -1,8 +1,8 @@ #pragma once -#include -#include -#include "hurricane/base/ByteArray.h" +#include +#include +#include "hurricane/base/ByteArray.h" #include "hurricane/base/Variant.h" namespace hurricane { @@ -12,7 +12,11 @@ namespace hurricane { struct Type { enum { Invalid = 0, - Join + Join, + Heartbeat, + SyncMetadata, + SendTuple, + AskField }; }; @@ -57,7 +61,7 @@ namespace hurricane { hurricane::base::ByteArray Serialize() const; private: - int32_t _type; + int32_t _type; std::vector _arguments; }; @@ -97,6 +101,12 @@ namespace hurricane { return _arguments; } + void AddArguments(const std::vector& arguments) { + for ( const hurricane::base::Variant & argument : arguments ) { + _arguments.push_back(argument); + } + } + void AddArgument(const hurricane::base::Variant& argument) { _arguments.push_back(argument); } @@ -105,8 +115,8 @@ namespace hurricane { hurricane::base::ByteArray Serialize() const; private: - int32_t _status; + int32_t _status; std::vector _arguments; }; } -} \ No newline at end of file +} diff --git a/include/hurricane/message/CommandClient.h b/include/hurricane/message/CommandClient.h index 5468204..ad309fc 100755 --- a/include/hurricane/message/CommandClient.h +++ b/include/hurricane/message/CommandClient.h @@ 
-13,6 +13,7 @@ namespace hurricane { class CommandClient { public: + typedef std::function ConnectCallback; typedef std::function SendCommandCallback; CommandClient(hurricane::util::NetConnector* connector) : _connector(connector) { @@ -20,10 +21,18 @@ namespace hurricane { ~CommandClient(); + void Connect(ConnectCallback callback); void SendCommand(const Command& command, SendCommandCallback callback); + hurricane::util::NetConnector* GetConnector() { + return _connector; + } + + const hurricane::util::NetConnector* GetConnector() const { + return _connector; + } private: hurricane::util::NetConnector* _connector; }; } -} \ No newline at end of file +} diff --git a/include/hurricane/message/CommandServer.h b/include/hurricane/message/CommandServer.h index 829e50d..1790e09 100755 --- a/include/hurricane/message/CommandServer.h +++ b/include/hurricane/message/CommandServer.h @@ -44,13 +44,13 @@ namespace hurricane { _connectHandler = handler; } - template - void OnCommand(int32_t commandType, ObjectType* self, HandlerType handler) { - OnCommand(commandType, std::bind(handler, self, std::placeholders::_1, std::placeholders::_2, std::placeholders::_3)); - } - - void OnCommand(int32_t commandType, CommandHandler handler) { - _commandHandlers.insert({ commandType, handler }); + template + void OnCommand(int32_t commandType, ObjectType* self, HandlerType handler) { + OnCommand(commandType, std::bind(handler, self, std::placeholders::_1, std::placeholders::_2, std::placeholders::_3)); + } + + void OnCommand(int32_t commandType, CommandHandler handler) { + _commandHandlers.insert({ commandType, handler }); } void Response(hurricane::util::TcpConnection* connection, const Response& response); @@ -78,23 +78,34 @@ namespace hurricane { std::map& commandHandlers = _commandHandlers; ConnectHandler connectHandler = _connectHandler; - _listener->OnConnection([this, connectHandler, commandHandlers](std::shared_ptr connection) { - CommandServerContext* context = new CommandServerContext; - hurricane::util::TcpConnection* rawConnection = connection.get(); - - _connectHandler(context); - - connection->OnData([this, commandHandlers, context, rawConnection](const char* buffer, int32_t size) { - hurricane::base::ByteArray commandBytes(buffer, size); - Command command; - command.Deserialize(commandBytes); - - int32_t commandType = command.GetType(); - CommandHandler handler = commandHandlers.at(commandType); - Responser responser = std::bind(&CommandServer::Response, this, rawConnection, std::placeholders::_1); - - handler(context, command, responser); - }); + _listener->OnConnection([this, connectHandler, commandHandlers](std::shared_ptr connection) { + CommandServerContext* context = new CommandServerContext; + hurricane::util::TcpConnection* rawConnection = connection.get(); + + _connectHandler(context); + + connection->OnData([this, commandHandlers, context, rawConnection](const char* buffer, int32_t size) { + hurricane::base::ByteArray commandBytes(buffer, size); + Command command; + command.Deserialize(commandBytes); + + int32_t commandType = command.GetType(); + try { + CommandHandler handler = commandHandlers.at(commandType); + Responser responser = std::bind(&CommandServer::Response, this, rawConnection, std::placeholders::_1); + + handler(context, command, responser); + } + catch ( const std::exception& e ) { + std::cout << "Some errors in command handler" << std::endl; + std::cerr << e.what() << std::endl; + + Responser responser = std::bind(&CommandServer::Response, this, rawConnection, 
std::placeholders::_1); + + hurricane::message::Response response(hurricane::message::Response::Status::Failed); + responser(response); + } + }); }); _listener->StartListen(); @@ -106,4 +117,4 @@ namespace hurricane { connection->Send(responseBytes.data(), responseBytes.size()); } } -} \ No newline at end of file +} diff --git a/include/hurricane/message/Message.h b/include/hurricane/message/Message.h index 31b3936..44d1099 100755 --- a/include/hurricane/message/Message.h +++ b/include/hurricane/message/Message.h @@ -18,7 +18,8 @@ #pragma once -#include +#include "hurricane/base/BlockingQueue.h" +#include "hurricane/base/Variant.h" #include #include #include @@ -33,6 +34,10 @@ namespace hurricane { Message(int32_t type) : _type(type) { } + Message(int32_t type, const base::Variants& arguments) : + _type(type), _arguments(arguments) { + } + int32_t GetType() const { return _type; } @@ -41,23 +46,34 @@ namespace hurricane { _type = type; } + const base::Variants& GetArguments() const { + return _arguments; + } + + void SetArguments(const base::Variants& arguments) { + _arguments = arguments; + } + private: int32_t _type; + base::Variants _arguments; }; + class MessageQueue: public base::BlockingQueue { + }; + class MessageLoop { public: - typedef std::function MessageHandler; + typedef std::function MessageHandler; - MessageLoop() { - _threadId = GetCurrentThreadId(); + MessageLoop() { } MessageLoop(const MessageLoop&) = delete; const MessageLoop& operator=(const MessageLoop&) = delete; template - void MessageMap(int messageType, ObjectType* self, MethodType method) { + void MessageMap(int messageType, ObjectType* self, MethodType method) { MessageMap(messageType, std::bind(method, self, std::placeholders::_1)); } @@ -66,27 +82,29 @@ namespace hurricane { } void Run() { - MSG msg; + Message* msg = nullptr; - while ( GetMessage(&msg, 0, 0, 0) ) { - std::cerr << "Recived Message" << std::endl; - auto handler = _messageHandlers.find(msg.message); + while ( true ) { + _messageQueue.Pop(msg); + if ( msg ) { + auto handler = _messageHandlers.find(msg->GetType()); - if ( handler != _messageHandlers.end() ) { - handler->second((Message*)(msg.wParam)); - } + if ( handler != _messageHandlers.end() ) { + handler->second(*msg); + } - DispatchMessage(&msg); - } + delete msg; + } + } } - void PostMessage(Message* message) { - PostThreadMessage(_threadId, message->GetType(), WPARAM(message), 0); + void PostMessage(Message* message) { + _messageQueue.Push(message); } private: - std::map _messageHandlers; - int32_t _threadId; + std::map _messageHandlers; + MessageQueue _messageQueue; }; class MessageLoopManager { diff --git a/include/hurricane/service/Nimbus.h b/include/hurricane/service/Nimbus.h index 4afe333..8ce6827 100755 --- a/include/hurricane/service/Nimbus.h +++ b/include/hurricane/service/Nimbus.h @@ -14,20 +14,64 @@ namespace hurricane { class Topology; } + namespace message { + class CommandClient; + } + + namespace spout { + class SpoutDeclarer; + } + + namespace bolt { + class BoltDeclarer; + } + namespace service { + typedef std::pair TaskPathName; + class Nimbus : public hurricane::message::CommandServer { public: Nimbus(const hurricane::base::NetAddress& host); Nimbus(const hurricane::util::Configuration& configuration); void OnConnect(SupervisorContext* context); - void OnJoin(SupervisorContext* context, const hurricane::message::Command& command, + void OnJoin(SupervisorContext* context, const hurricane::message::Command& command, hurricane::message::CommandServer::Responser responser); + 
void OnAskField(SupervisorContext* context, const hurricane::message::Command& command, + hurricane::message::CommandServer::Responser responser); void SubmitTopology(hurricane::topology::Topology* topology); + + private: + void SendHeartbeat(const std::string supervisorId); + std::list GetAllSpoutTasks(const std::map& spoutDeclarers, hurricane::topology::Topology* topology); + void AllocateSpoutTasks(std::map nameToSpoutTasks, std::list originSpoutTasks); + std::map > AllocateSpoutTasks(std::list& originSpoutTasks); + std::list GetAllBoltTasks(hurricane::topology::Topology* topology, const std::map& boltDeclarers); + std::map > AllocateBoltTasks(std::list& originBoltTasks); + std::vector FindTask( + const std::map >& nameToBoltTasks, + const std::map >& nameToSpoutTasks, + const std::string& sourceTaskName); + std::vector FindTask( + const std::map >& nameToBoltTasks, + const std::string& sourceTaskName); + void CalculateTaskPaths( + const std::map >& nameToBoltTasks, + const std::map& boltDeclarers, + const std::map >& nameToSpoutTasks); + void ShowSupervisorMetadata(); + void ShowSupervisorTaskInfos(); + void ShowTaskInfos(const std::vector& taskInfos); + void SyncWithSupervisors(); private: hurricane::base::NetAddress _nimbusHost; std::vector _supervisors; - }; + int _supervisorCount; + std::shared_ptr _configuration; + std::map> _supervisorClients; + std::map> _fieldsCandidates; + std::map> _fieldsDestinations; + }; } -} \ No newline at end of file +} diff --git a/include/hurricane/service/Supervisor.h b/include/hurricane/service/Supervisor.h index a0b6086..8535f24 100755 --- a/include/hurricane/service/Supervisor.h +++ b/include/hurricane/service/Supervisor.h @@ -4,6 +4,7 @@ #include "hurricane/util/NetListener.h" #include "hurricane/base/NetAddress.h" #include "hurricane/service/SupervisorContext.h" +#include "hurricane/collector/OutputDispatcher.h" #include #include @@ -16,7 +17,21 @@ namespace hurricane { namespace message { class CommandClient; - } + } + + namespace task { + class SpoutExecutor; + class BoltExecutor; + } + + namespace topology { + class Topology; + } + + namespace collector { + class OutputCollector; + class TaskQueue; + } namespace service { class Supervisor : public hurricane::message::CommandServer { @@ -29,15 +44,44 @@ namespace hurricane { void JoinNimbus(JoinNimbusCallback callback); + void OnHeartbeat(SupervisorContext* context, const hurricane::message::Command& command, + hurricane::message::CommandServer::Responser responser); + void OnSyncMetadata(SupervisorContext* context, const hurricane::message::Command& command, + hurricane::message::CommandServer::Responser responser); + void OnSendTuple(SupervisorContext* context, const hurricane::message::Command& command, + hurricane::message::CommandServer::Responser responser); + + private: void InitSelfContext(); + void InitExecutors(); + void OwnSupervisorTasks(); + void ShowSupervisorMetadata(); + void ShowTaskInfos(); + void InitSpoutExecutors(); + void InitBoltExecutors(); + void InitNimbusConnector(); + void ReserveExecutors(); + void InitEvents(); + void InitTaskFieldsMap(); private: std::string _name; + std::string _host; + int _port; std::shared_ptr _supervisorConfiguration; hurricane::util::NetConnector* _nimbusConnector; hurricane::message::CommandClient* _nimbusClient; std::shared_ptr _selfContext; - }; + std::vector> _spoutExecutors; + std::vector> _boltExecutors; + std::vector> _spoutCollectors; + std::vector> _boltCollectors; + std::vector> _boltTaskQueues; + std::shared_ptr _topology; + 
hurricane::collector::OutputDispatcher _outputDispatcher; + std::map*> _taskFields; + std::map*> _taskFieldsMap; + }; } -} \ No newline at end of file +} diff --git a/include/hurricane/service/SupervisorContext.h b/include/hurricane/service/SupervisorContext.h index cb877ba..6163f71 100755 --- a/include/hurricane/service/SupervisorContext.h +++ b/include/hurricane/service/SupervisorContext.h @@ -1,14 +1,20 @@ #pragma once #include "hurricane/task/TaskInfo.h" + #include #include #include namespace hurricane { namespace service { - class SupervisorContext { - public: + class SupervisorContext : base::Serializable { + public: + enum class ExecutorType { + Spout, + Bolt + }; + SupervisorContext(); const std::string& GetId() const { @@ -19,6 +25,14 @@ namespace hurricane { _id = id; } + const hurricane::base::NetAddress& GetNetAddress() const { + return _netAddress; + } + + void SetNetAddress(const hurricane::base::NetAddress& netAddress) { + _netAddress = netAddress; + } + int GetSpoutCount() const { return _spoutCount; } @@ -72,6 +86,17 @@ namespace hurricane { _busySpouts.insert(spoutIndex); } + int useNextSpout() { + if ( !_freeSpouts.size() ) { + return -1; + } + + int spoutIndex = *(_freeSpouts.begin()); + useSpout(spoutIndex); + + return spoutIndex; + } + void freeSpout(int spoutIndex) { _freeSpouts.insert(spoutIndex); _busySpouts.erase(spoutIndex); @@ -82,11 +107,30 @@ namespace hurricane { _busyBolts.insert(boltIndex); } + int useNextBolt() { + if ( !_freeBolts.size() ) { + return -1; + } + + int boltIndex = *(_freeBolts.begin()); + useBolt(boltIndex); + + return boltIndex; + } + void freeBolt(int boltIndex) { _freeBolts.insert(boltIndex); _busyBolts.erase(boltIndex); } + void PrepareTaskInfos() { + _taskInfos.resize(_spoutCount + _boltCount); + } + + std::vector& GetTaskInfos() { + return _taskInfos; + } + const std::vector& GetTaskInfos() const { return _taskInfos; } @@ -99,18 +143,57 @@ namespace hurricane { return _taskInfos[index]; } - void SetTaskInfo(int index, const hurricane::task::TaskInfo& info) { + const hurricane::task::TaskInfo& GetSpoutTaskInfo(int index) const { + return _taskInfos[index]; + } + + const hurricane::task::TaskInfo& GetBoltTaskInfo(int index) const { + return _taskInfos[_spoutCount + index]; + } + + hurricane::task::TaskInfo& GetTaskInfo(int index) { + return _taskInfos[index]; + } + + hurricane::task::TaskInfo& GetSpoutTaskInfo(int index) { + return _taskInfos[index]; + } + + hurricane::task::TaskInfo& GetBoltTaskInfo(int index) { + return _taskInfos[_spoutCount + index]; + } + + void SetTaskInfo(int index, const hurricane::task::TaskInfo& info) { _taskInfos[index] = info; } - std::vector ToVariants(); - void ParseVariants(const std::vector& variants); - void ParseVariants(std::vector::const_iterator begin); - static SupervisorContext FromVariants(const std::vector& variants); - static SupervisorContext FromVariants(std::vector::const_iterator begin); + int SetSpoutTaskInfo(int index, const hurricane::task::TaskInfo& info) { + _taskInfos[index] = info; + + return index; + } + + int SetBoltTaskInfo(int index, const hurricane::task::TaskInfo& info) { + _taskInfos[_spoutCount + index] = info; + + return _spoutCount + index; + } + + int GetExecutorIndex(ExecutorType type, int subIndex) { + if ( type == ExecutorType::Spout ) { + return subIndex; + } + + return _spoutCount + subIndex; + } + + virtual void Serialize(hurricane::base::Variants& variants) const override; + virtual void Deserialize(hurricane::base::Variants::const_iterator& it) override; private: 
std::string _id; + hurricane::base::NetAddress _netAddress; + int _spoutCount; int _boltCount; @@ -120,7 +203,7 @@ namespace hurricane { std::set _busySpouts; std::set _busyBolts; - std::vector _taskInfos; + std::vector _taskInfos; }; } -} \ No newline at end of file +} diff --git a/include/hurricane/spout/SpoutDeclarer.h b/include/hurricane/spout/SpoutDeclarer.h index 1e15c03..dadba48 100755 --- a/include/hurricane/spout/SpoutDeclarer.h +++ b/include/hurricane/spout/SpoutDeclarer.h @@ -3,6 +3,8 @@ #include "hurricane/task/TaskDeclarer.h" #include #include +#include +#include namespace hurricane { namespace spout { @@ -19,8 +21,22 @@ namespace hurricane { return *this; } + std::shared_ptr GetSpout() const { + return _spout; + } + + const std::vector& GetFields() const { + return _fields; + } + + const std::map& GetFieldsMap() const { + return _fieldsMap; + } + private: std::shared_ptr _spout; + std::vector _fields; + std::map _fieldsMap; }; } -} \ No newline at end of file +} diff --git a/include/hurricane/task/BoltExecutor.h b/include/hurricane/task/BoltExecutor.h new file mode 100644 index 0000000..8a64939 --- /dev/null +++ b/include/hurricane/task/BoltExecutor.h @@ -0,0 +1,51 @@ +#pragma once + +#include "hurricane/task/Executor.h" +#include +#include + +namespace hurricane { + +namespace bolt { +class IBolt; +} + +namespace collector { +class TaskQueue; +} + +namespace task { + +class BoltExecutor : public Executor { +public: + BoltExecutor(); + ~BoltExecutor() {} + + void Start(); + + std::shared_ptr GetBolt() { + return _bolt; + } + + void SetBolt(bolt::IBolt* bolt) { + _bolt.reset(bolt); + } + + void SetTaskQueue(std::shared_ptr taskQueue) { + _taskQueue = taskQueue; + } + +protected: + hurricane::message::MessageLoop _loop; + +private: + void OnTuple(hurricane::message::Message& message); + void StartLoop(); + + std::thread _thread; + std::shared_ptr _bolt; + std::shared_ptr _taskQueue; +}; + +} +} diff --git a/include/hurricane/task/Executor.h b/include/hurricane/task/Executor.h new file mode 100644 index 0000000..e090ff5 --- /dev/null +++ b/include/hurricane/task/Executor.h @@ -0,0 +1,22 @@ +#pragma once + +#include "hurricane/message/Message.h" + +namespace hurricane { +namespace task { + +class Executor { +public: + struct MessageType { + enum { + OnTuple + }; + }; + + virtual ~Executor() {} + + virtual void Start() = 0; +}; + +} +} diff --git a/include/hurricane/task/SpoutExecutor.h b/include/hurricane/task/SpoutExecutor.h new file mode 100644 index 0000000..abf5f80 --- /dev/null +++ b/include/hurricane/task/SpoutExecutor.h @@ -0,0 +1,38 @@ +#pragma once + +#include "hurricane/task/Executor.h" +#include +#include + +namespace hurricane { + +namespace spout { + class ISpout; +} +namespace task { + +class SpoutExecutor : public Executor { +public: + SpoutExecutor(); + ~SpoutExecutor() {} + + void Start(); + void SetSpout(spout::ISpout* spout); + + std::shared_ptr GetSpout() { + return _spout; + } + + int GetFlowParam() const; + void SetFlowParam(int GetFlowParam); + +private: + void MainLoop(); + + std::thread _thread; + std::shared_ptr _spout; + int _flowParam; +}; + +} +} diff --git a/include/hurricane/task/TaskDeclarer.h b/include/hurricane/task/TaskDeclarer.h index 6d1f827..13fda73 100755 --- a/include/hurricane/task/TaskDeclarer.h +++ b/include/hurricane/task/TaskDeclarer.h @@ -17,9 +17,9 @@ namespace hurricane { struct GroupMethod { enum { - Global = 0, - Random = 1, - Field = 2 + Global = 0, + Field = 1, + Random = 2 }; }; @@ -84,4 +84,4 @@ namespace hurricane { int32_t 
_parallismHint; }; } -} \ No newline at end of file +} diff --git a/include/hurricane/task/TaskInfo.h b/include/hurricane/task/TaskInfo.h index 026e044..4c3a4db 100755 --- a/include/hurricane/task/TaskInfo.h +++ b/include/hurricane/task/TaskInfo.h @@ -4,11 +4,108 @@ #include "hurricane/base/Variant.h" #include #include +#include +#include namespace hurricane { - namespace task { - class TaskInfo { - public: + namespace service { + class SupervisorContext; + } + + namespace task { + class ExecutorPosition : public hurricane::base::Serializable { + public: + ExecutorPosition() : _executorIndex(-1) { + } + + ExecutorPosition(const hurricane::base::NetAddress& supervisor, int executorIndex) : + _supervisor(supervisor), _executorIndex(executorIndex) { + } + + const hurricane::base::NetAddress& GetSupervisor() const { + return _supervisor; + } + + void SetSupervisor(const hurricane::base::NetAddress& supervisor) { + _supervisor = supervisor; + } + + int GetExecutorIndex() const { + return _executorIndex; + } + + void SetExecutorIndex(int executorIndex) { + _executorIndex = executorIndex; + } + + virtual void Serialize(hurricane::base::Variants& variants) const override; + virtual void Deserialize(hurricane::base::Variants::const_iterator& it) override; + + private: + hurricane::base::NetAddress _supervisor; + int32_t _executorIndex; + }; + + class PathInfo : public hurricane::base::Serializable { + public: + struct GroupMethod { + enum { + Invalid = 0, + Global, + Field, + Random + }; + }; + + PathInfo() : _groupMethod(GroupMethod::Invalid) {} + + int GetGroupMethod() const { + return _groupMethod; + } + + void SetGroupMethod(int groupMethod) { + _groupMethod = groupMethod; + } + + void SetDestinationTask(const std::string& taskName) { + _destinationTask = taskName; + } + + const std::string& GetTaskName() const { + return _destinationTask; + } + + const std::string& GetFieldName() const { + return _fieldName; + } + + void SetFieldName(const std::string& fieldName) { + _fieldName = fieldName; + } + + const std::vector& GetDestinationExecutors() const { + return _destinationExecutors; + } + + void SetDestinationExecutors(const std::vector& executors) { + _destinationExecutors = executors; + } + + virtual void Serialize(hurricane::base::Variants& variants) const override; + virtual void Deserialize(hurricane::base::Variants::const_iterator& it) override; + + private: + int _groupMethod; + std::string _destinationTask; + std::string _fieldName; + std::vector _destinationExecutors; + }; + + class TaskInfo : public hurricane::base::Serializable { + public: + TaskInfo() : _supervisorContext(nullptr), _executorIndex(-1) { + } + const std::string& GetTopologyName() const { return _topologyName; } @@ -23,52 +120,49 @@ namespace hurricane { void SetTaskName(const std::string& taskName) { _taskName = taskName; - } + } - const hurricane::base::NetAddress& GetSourceSupervisor() const { - return _sourceSupervisor; - } + const std::list& GetPaths() const { + return _paths; + } - void SetSourceSupervisor(const hurricane::base::NetAddress& sourceSupervisor) { - _sourceSupervisor = sourceSupervisor; - } + void SetPaths(const std::list& paths) { + _paths = paths; + } - int GetSourceExecutorIndex() const { - return _sourceExecutorIndex; - } + void AddPath(const PathInfo& path) { + _paths.push_back(path); + } - void SetSourceExecutorIndex(int sourceExecutorIndex) { - _sourceExecutorIndex = sourceExecutorIndex; - } + const hurricane::service::SupervisorContext* GetSupervisorContext() const { + return 
_supervisorContext; + } - const hurricane::base::NetAddress& GetDestinationSupervisor() const { - return _destinationSupervisor; - } + hurricane::service::SupervisorContext* GetSupervisorContext() { + return _supervisorContext; + } - void SetDestinationSupervisor(const hurricane::base::NetAddress& destinationSupervisor) { - _destinationSupervisor = destinationSupervisor; - } + void SetSupervisorContext(hurricane::service::SupervisorContext* context) { + _supervisorContext = context; + } - int GetDestinationExecutorIndex() const { - return _destinationExecutorIndex; - } + int GetExecutorIndex() const { + return _executorIndex; + } - void SetDestinationExecutorIndex(int destinationExecutorIndex) { - _destinationExecutorIndex = destinationExecutorIndex; - } + void SetExecutorIndex(int executorIndex) { + _executorIndex = executorIndex; + } - std::vector ToVariants(); - void ParseVariant(const std::vector& variants); - static TaskInfo FromVariants(const std::vector& variants); + virtual void Serialize(hurricane::base::Variants& variants) const override; + virtual void Deserialize(hurricane::base::Variants::const_iterator& it) override; std::string _topologyName; - std::string _taskName; - - hurricane::base::NetAddress _sourceSupervisor; - int32_t _sourceExecutorIndex; + std::string _taskName; + std::list _paths; - hurricane::base::NetAddress _destinationSupervisor; - int32_t _destinationExecutorIndex; + hurricane::service::SupervisorContext* _supervisorContext; + int _executorIndex; }; } -} \ No newline at end of file +} diff --git a/include/hurricane/topology/TopologyLoader.h b/include/hurricane/topology/TopologyLoader.h index 43a6361..4a4fb89 100755 --- a/include/hurricane/topology/TopologyLoader.h +++ b/include/hurricane/topology/TopologyLoader.h @@ -5,14 +5,17 @@ #include #include +typedef void* LibraryHandle; + namespace hurricane { namespace topology { class Topology; class TopologyLoader { public: - static TopologyLoader& GetInstance(); + typedef Topology* (*TopologyGetter)(); + static TopologyLoader& GetInstance(); std::shared_ptr GetTopology(const std::string& name); private: @@ -21,6 +24,7 @@ namespace hurricane { const TopologyLoader& operator = (const TopologyLoader& loader) = delete; std::map> _topologies; + std::map _libraryHandles; }; } -} \ No newline at end of file +} diff --git a/include/hurricane/util/Socket.h b/include/hurricane/util/Socket.h index abe89d1..af7c395 100755 --- a/include/hurricane/util/Socket.h +++ b/include/hurricane/util/Socket.h @@ -98,7 +98,7 @@ namespace hurricane { int Send(const char* buf, size_t size) { int iSend = send(_socket, buf, size, 0); if ( iSend == SOCKET_ERROR ) - { + { throw SocketException("send() Failed"); } @@ -224,7 +224,7 @@ namespace hurricane { if ( ret == INVALID_SOCKET ) { DISPLAY_SOCKET_ERROR("connect() Failed"); - exit(-1); + throw SocketException("connect() Failed"); } } }; diff --git a/include/hurricane/util/StringUtil.h b/include/hurricane/util/StringUtil.h index f176060..eee8b7a 100755 --- a/include/hurricane/util/StringUtil.h +++ b/include/hurricane/util/StringUtil.h @@ -24,3 +24,5 @@ std::vector SplitString(const std::string& value, char seperator); std::string TrimString(const std::string& value); std::string RandomString(const std::string& candidate, int length); +std::string JoinStrings(const std::vector& words); +std::string Int2String(int value); diff --git a/include/redox.hpp b/include/redox.hpp new file mode 100644 index 0000000..4497399 --- /dev/null +++ b/include/redox.hpp @@ -0,0 +1,13 @@ +#pragma once + +#include 
+ +namespace redox { + +class Redox { +public: + void connect(const std::string& host, int port) { + } +}; + +} diff --git a/include/sample/wordcount/HelloWorldSpout.h b/include/sample/wordcount/HelloWorldSpout.h index b3b6c04..a8ad329 100755 --- a/include/sample/wordcount/HelloWorldSpout.h +++ b/include/sample/wordcount/HelloWorldSpout.h @@ -1,6 +1,7 @@ #pragma once #include "hurricane/spout/ISpout.h" +#include class HelloWorldSpout : public hurricane::spout::ISpout { public: @@ -14,4 +15,5 @@ class HelloWorldSpout : public hurricane::spout::ISpout { private: std::shared_ptr _outputCollector; -}; \ No newline at end of file + std::vector _words; +}; diff --git a/include/sample/wordcount/SplitSentenceBolt.h b/include/sample/wordcount/SplitSentenceBolt.h index f51d9e9..f98a900 100755 --- a/include/sample/wordcount/SplitSentenceBolt.h +++ b/include/sample/wordcount/SplitSentenceBolt.h @@ -5,7 +5,7 @@ class SplitSentenceBolt : public hurricane::bolt::IBolt { public: virtual hurricane::bolt::IBolt* Clone() override { - return new SplitSentenceBolt(*this); + return new SplitSentenceBolt; } virtual void Prepare(std::shared_ptr outputCollector) override; virtual void Cleanup() override; @@ -13,5 +13,5 @@ class SplitSentenceBolt : public hurricane::bolt::IBolt { virtual void Execute(const hurricane::base::Tuple& tuple) override; private: - std::shared_ptr _outputCollector; -}; \ No newline at end of file + std::shared_ptr _outputCollector; +}; diff --git a/include/sample/wordcount/WordCountBolt.h b/include/sample/wordcount/WordCountBolt.h index 17a04c3..f03276a 100755 --- a/include/sample/wordcount/WordCountBolt.h +++ b/include/sample/wordcount/WordCountBolt.h @@ -1,6 +1,7 @@ #pragma once #include "hurricane/bolt/IBolt.h" +#include #include #include @@ -19,4 +20,5 @@ class WordCountBolt : public hurricane::bolt::IBolt { private: std::shared_ptr _outputCollector; std::map _wordCounts; -}; \ No newline at end of file + std::ofstream* _logFile; +}; diff --git a/include/sample/wordcount/WordCountTopology.h b/include/sample/wordcount/WordCountTopology.h index a0ea85e..de54d10 100755 --- a/include/sample/wordcount/WordCountTopology.h +++ b/include/sample/wordcount/WordCountTopology.h @@ -6,4 +6,8 @@ namespace hurricane { } } -hurricane::topology::Topology* GetTopology(); \ No newline at end of file +#include "hurricane/base/externc.h" + +BEGIN_EXTERN_C +hurricane::topology::Topology* GetTopology(); +END_EXTERN_C diff --git a/kake/Hurricane/Kakefile b/kake/Hurricane/Kakefile index d88c8d0..ce0ec39 100644 --- a/kake/Hurricane/Kakefile +++ b/kake/Hurricane/Kakefile @@ -6,6 +6,7 @@ module.exports = { type: 'solution', projects: [ 'nimbus', - 'supervisor' + 'supervisor', + 'wordcount' ] }; diff --git a/kake/Hurricane/deps.kake b/kake/Hurricane/deps.kake index 8eb077a..b994947 100644 --- a/kake/Hurricane/deps.kake +++ b/kake/Hurricane/deps.kake @@ -1,4 +1,9 @@ 'use strict'; module.exports = { + pthread: { + version: '1.0.0', + scope: 'system', + inputs: [ 'pthread' ] + } }; diff --git a/kake/Hurricane/nimbus/Kakefile b/kake/Hurricane/nimbus/Kakefile index fdeb573..559489c 100644 --- a/kake/Hurricane/nimbus/Kakefile +++ b/kake/Hurricane/nimbus/Kakefile @@ -19,8 +19,7 @@ module.exports = { '../../../src/hurricane/task', '../../../src/hurricane/topology', '../../../src/hurricane/util', - '../../../src/hurricane/tool/StartNimbus.cpp', - '../../../src/sample/wordcount' + '../../../src/hurricane/tool/StartNimbus.cpp' ], includePaths: [ '../../../include' @@ -28,5 +27,9 @@ module.exports = { }, linker: { ld: 'g++', 
+ ldflags: [ '-ldl' ] + }, + dependencies: { + pthread: 'latest' } }; diff --git a/kake/Hurricane/supervisor/Kakefile b/kake/Hurricane/supervisor/Kakefile index 2c3fe14..f4591b8 100644 --- a/kake/Hurricane/supervisor/Kakefile +++ b/kake/Hurricane/supervisor/Kakefile @@ -19,8 +19,7 @@ module.exports = { '../../../src/hurricane/task', '../../../src/hurricane/topology', '../../../src/hurricane/util', - '../../../src/hurricane/tool/StartSupervisor.cpp', - '../../../src/sample/wordcount' + '../../../src/hurricane/tool/StartSupervisor.cpp' ], includePaths: [ '../../../include' @@ -28,5 +27,9 @@ module.exports = { }, linker: { ld: 'g++', + ldflags: [ '-ldl' ] + }, + dependencies: { + pthread: 'latest' } }; diff --git a/kake/Hurricane/wordcount/Kakefile b/kake/Hurricane/wordcount/Kakefile new file mode 100644 index 0000000..77f82ac --- /dev/null +++ b/kake/Hurricane/wordcount/Kakefile @@ -0,0 +1,31 @@ +'use strict'; + +module.exports = { + name: 'wordcount', + version: '0.0.1', + type: 'cpp', + target: 'dynamic_library', + targetPath: '../../../target', + compiler: { + cxx: 'g++', + cxxflags: ['-std=c++11'], + src: [ + '../../../src/hurricane/base', + '../../../src/hurricane/bolt', + '../../../src/hurricane/collector/OutputCollector.cpp', + '../../../src/hurricane/message', + '../../../src/hurricane/spout', + '../../../src/hurricane/task/TaskDeclarer.cpp', + '../../../src/hurricane/topology/Topology.cpp', + '../../../src/hurricane/util/Configuration.cpp', + '../../../src/hurricane/util/StringUtil.cpp', + ], + includePaths: [ + '../../../include' + ] + }, + linker: { + ld: 'g++', + ldflags: [ '-ldl' ] + } +}; diff --git a/msvc/12/Hurricane/hurricane.sln b/msvc/12/Hurricane/hurricane.sln deleted file mode 100755 index 8d1c597..0000000 --- a/msvc/12/Hurricane/hurricane.sln +++ /dev/null @@ -1,28 +0,0 @@ - -Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio 2013 -VisualStudioVersion = 12.0.21005.1 -MinimumVisualStudioVersion = 10.0.40219.1 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "nimbus", "nimbus.vcxproj", "{4EAF81BF-7BC0-4753-9CAC-09270178AD9E}" -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "supervisor", "..\supervisor\supervisor.vcxproj", "{10138BD7-AC7E-44F5-8D1B-B4E21BF32EA8}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|Win32 = Debug|Win32 - Release|Win32 = Release|Win32 - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {4EAF81BF-7BC0-4753-9CAC-09270178AD9E}.Debug|Win32.ActiveCfg = Debug|Win32 - {4EAF81BF-7BC0-4753-9CAC-09270178AD9E}.Debug|Win32.Build.0 = Debug|Win32 - {4EAF81BF-7BC0-4753-9CAC-09270178AD9E}.Release|Win32.ActiveCfg = Release|Win32 - {4EAF81BF-7BC0-4753-9CAC-09270178AD9E}.Release|Win32.Build.0 = Release|Win32 - {10138BD7-AC7E-44F5-8D1B-B4E21BF32EA8}.Debug|Win32.ActiveCfg = Debug|Win32 - {10138BD7-AC7E-44F5-8D1B-B4E21BF32EA8}.Debug|Win32.Build.0 = Debug|Win32 - {10138BD7-AC7E-44F5-8D1B-B4E21BF32EA8}.Release|Win32.ActiveCfg = Release|Win32 - {10138BD7-AC7E-44F5-8D1B-B4E21BF32EA8}.Release|Win32.Build.0 = Release|Win32 - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection -EndGlobal diff --git a/msvc/12/Hurricane/hurricane.suo b/msvc/12/Hurricane/hurricane.suo deleted file mode 100755 index 19aff86..0000000 Binary files a/msvc/12/Hurricane/hurricane.suo and /dev/null differ diff --git a/msvc/12/Hurricane/hurricane.v12.suo b/msvc/12/Hurricane/hurricane.v12.suo deleted file mode 100755 index 
af09a3a..0000000 Binary files a/msvc/12/Hurricane/hurricane.v12.suo and /dev/null differ diff --git a/msvc/12/Hurricane/nimbus.vcxproj b/msvc/12/Hurricane/nimbus.vcxproj deleted file mode 100755 index d69fd61..0000000 --- a/msvc/12/Hurricane/nimbus.vcxproj +++ /dev/null @@ -1,130 +0,0 @@ - - - - - Debug - Win32 - - - Release - Win32 - - - - {4EAF81BF-7BC0-4753-9CAC-09270178AD9E} - Win32Proj - nimbus - - - - Application - true - v120 - Unicode - - - Application - false - v120 - true - Unicode - - - - - - - - - - - - - true - - - false - - - - - - Level3 - Disabled - WIN32;_DEBUG;_CONSOLE;_LIB;%(PreprocessorDefinitions) - $(SolutionDir)..\..\..\include;%(AdditionalIncludeDirectories) - - - Console - true - - - - - Level3 - - - MaxSpeed - true - true - WIN32;NDEBUG;_CONSOLE;_LIB;%(PreprocessorDefinitions) - $(SolutionDir)..\..\..\include;%(AdditionalIncludeDirectories) - - - Console - true - true - true - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/msvc/12/Hurricane/nimbus.vcxproj.filters b/msvc/12/Hurricane/nimbus.vcxproj.filters deleted file mode 100755 index 87d0aad..0000000 --- a/msvc/12/Hurricane/nimbus.vcxproj.filters +++ /dev/null @@ -1,210 +0,0 @@ - - - - - {4FC737F1-C7A5-4376-A066-2A32D752A2FF} - cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx - - - {93995380-89BD-4b04-88EB-625FBE52EBFB} - h;hh;hpp;hxx;hm;inl;inc;xsd - - - {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} - rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms - - - {ffe24b97-de3b-47ca-ab42-2f816437398e} - - - {199d07a0-bb3d-4b59-8e61-5de4b9d41fbb} - - - {09005c13-5d7c-484b-b00d-a501b5f2c1b0} - - - {1151322a-3d97-4ecb-96d3-c44b078d74cb} - - - {fb611766-ef59-4ff8-ae64-4ea489cfaeac} - - - {98820893-8530-45fa-b3bd-91d692c01c2f} - - - {68c7eba0-de2e-49bb-a6d2-9a14028b8b09} - - - {ee45ab63-07e8-4c35-b620-d5952a27925b} - - - {0299d7fa-d962-420a-ab54-f134589d4ca8} - - - {3428b634-33f6-47d4-9e98-96b7f42f1cdc} - - - {52b08f0d-35e5-48a0-9785-b9cd64c569b9} - - - {b3b88421-40c6-49d2-aaf8-6115ad1959d4} - - - {59cd36ce-74ab-4358-92cd-f7100273953e} - - - {9260776c-b3a2-4f82-b411-9e44fbf4b4b0} - - - {dbcdca99-c194-491a-a292-f8324313b6b4} - - - {9fc23f85-64cb-45c9-b42e-834064979719} - - - {55fe557c-155f-4e7c-978e-d1eb2ce2f57b} - - - {5f204b6a-131d-471e-9708-bdb32cd28adc} - - - {349da14a-0a4c-4882-92a8-10d77c8106b5} - - - {544a6286-18be-481a-8977-218e859a691d} - - - - - 源文件\hurricane\base - - - 源文件\hurricane\util - - - 源文件\hurricane\util - - - 源文件\hurricane\util - - - 源文件\hurricane\util - - - 源文件\hurricane\message - - - 源文件\hurricane\message - - - 源文件\hurricane\message - - - 源文件\hurricane\service - - - 源文件\hurricane\service - - - 源文件\hurricane\tool - - - 源文件\hurricane\util - - - 源文件\hurricane\service - - - 源文件\sample\wordcount - - - 源文件\sample\wordcount - - - 源文件\sample\wordcount - - - 源文件\sample\wordcount - - - 源文件\hurricane\collector - - - 源文件\hurricane\bolt - - - 源文件\hurricane\task - - - 源文件\hurricane\spout - - - 源文件\hurricane\topology - - - 源文件\hurricane\topology - - - - - 头文件\hurricane\base - - - 头文件\hurricane\base - - - 头文件\hurricane\base - - - 头文件\hurricane\base - - - 头文件\hurricane\base - - - 头文件\hurricane\message - - - 头文件\hurricane\util - - - 头文件\hurricane\util - - - 头文件\hurricane\util - - - 头文件\hurricane\util - - - 头文件\hurricane\message - - - 头文件\hurricane\message - - - 头文件\hurricane\message - - - 头文件\hurricane\service - - - 头文件\hurricane\service - - - 头文件\hurricane\util - - - 头文件\sample\wordcount - - - 
头文件\sample\wordcount - - - 头文件\sample\wordcount - - - 头文件\sample\wordcount - - - \ No newline at end of file diff --git a/msvc/12/Hurricane/nimbus.vcxproj.user b/msvc/12/Hurricane/nimbus.vcxproj.user deleted file mode 100755 index aad40cd..0000000 --- a/msvc/12/Hurricane/nimbus.vcxproj.user +++ /dev/null @@ -1,11 +0,0 @@ - - - - supervisor - WindowsLocalDebugger - - - supervisor - WindowsLocalDebugger - - \ No newline at end of file diff --git a/msvc/12/supervisor/supervisor.vcxproj b/msvc/12/supervisor/supervisor.vcxproj deleted file mode 100755 index 60ae777..0000000 --- a/msvc/12/supervisor/supervisor.vcxproj +++ /dev/null @@ -1,140 +0,0 @@ - - - - - Debug - Win32 - - - Release - Win32 - - - - {10138BD7-AC7E-44F5-8D1B-B4E21BF32EA8} - Win32Proj - supervisor - - - - Application - true - v120 - Unicode - - - Application - false - v120 - true - Unicode - - - - - - - - - - - - - true - - - false - - - - - - Level3 - Disabled - WIN32;_DEBUG;_CONSOLE;_LIB;%(PreprocessorDefinitions) - - - Console - true - - - - - Level3 - - - MaxSpeed - true - true - WIN32;NDEBUG;_CONSOLE;_LIB;%(PreprocessorDefinitions) - $(SolutionDir)..\..\..\include;%(AdditionalIncludeDirectories) - - - Console - true - true - true - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/msvc/12/supervisor/supervisor.vcxproj.filters b/msvc/12/supervisor/supervisor.vcxproj.filters deleted file mode 100755 index 9bca714..0000000 --- a/msvc/12/supervisor/supervisor.vcxproj.filters +++ /dev/null @@ -1,258 +0,0 @@ - - - - - {4FC737F1-C7A5-4376-A066-2A32D752A2FF} - cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx - - - {93995380-89BD-4b04-88EB-625FBE52EBFB} - h;hh;hpp;hxx;hm;inl;inc;xsd - - - {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} - rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms - - - {6db5b0e3-3360-4c96-b4ac-37b65711179d} - - - {6381391f-5970-40e1-802c-28062f38206f} - - - {bda14243-586f-4349-a8d0-c24ec53e841b} - - - {33dc06f7-95eb-47fd-a525-4810ef2ae260} - - - {210d0cb1-906c-4275-bca9-afa3ccfcebd3} - - - {f31bd5db-5faf-4016-b6cf-481571d76e6a} - - - {8586cbcb-14c3-492c-8a5e-d0e4713f7e4b} - - - {dc02e35f-b93e-4e58-b786-011eb32ff7ca} - - - {1d22a637-81e2-49fa-be38-ec59b107b8e1} - - - {54efb49c-98ff-47f5-90bb-55806423e6e3} - - - {ef039453-6bf0-4a40-a6e7-3c6881ddfbaf} - - - {fd69eae3-be8a-49c1-ad1c-e7d872c9057b} - - - {1fd7e809-e787-4bab-8352-31d76b35a5bc} - - - {18984799-86aa-4c61-ab60-64cfcce17f62} - - - {b5dd368e-0c4c-489a-8928-859dd4247b21} - - - {74d0bf59-849b-432c-bf43-7c7617c50e09} - - - {23be807c-8b76-4592-a8c1-56fd40a607fe} - - - {a1837d7e-79cb-44b2-834c-7ddb8707f849} - - - {aca90d15-7604-45af-8f6c-c2d69851571c} - - - {2d61bf55-be95-4909-a94b-5cffdba7fd79} - - - {3c2b553f-5a75-486f-87f9-8ab4de852cee} - - - {33394e40-b3a9-43ec-90b4-b0120b428f27} - - - {1d179d8d-5d71-4c4a-ba64-b48561089f30} - - - {5e801fe5-9744-4f5d-8e68-4ed6f6437568} - - - {c71a2b18-e63a-4b63-91ab-749a33415d95} - - - - - 头文件\hurricane\base - - - 头文件\hurricane\base - - - 头文件\hurricane\base - - - 头文件\hurricane\base - - - 头文件\hurricane\base - - - 头文件\hurricane\message - - - 头文件\hurricane\message - - - 头文件\hurricane\message - - - 头文件\hurricane\message - - - 头文件\hurricane\util - - - 头文件\hurricane\util - - - 头文件\hurricane\util - - - 头文件\hurricane\util - - - 头文件\hurricane\service - - - 头文件\hurricane\service - - - 头文件\hurricane\util - - - 头文件\hurricane\service - - - 头文件\hurricane\task - - - 头文件\hurricane\bolt - - - 
头文件\hurricane\spout - - - 头文件\hurricane\task - - - 头文件\hurricane\spout - - - 头文件\hurricane\bolt - - - 头文件\hurricane\collector - - - 头文件\sample\wordcount - - - 头文件\sample\wordcount - - - 头文件\sample\wordcount - - - 头文件\hurricane\topology - - - 头文件\sample\wordcount - - - 头文件\hurricane\topology - - - 头文件\hurricane\task - - - - - 源文件\hurricane\base - - - 源文件\hurricane\message - - - 源文件\hurricane\message - - - 源文件\hurricane\message - - - 源文件\hurricane\util - - - 源文件\hurricane\util - - - 源文件\hurricane\util - - - 源文件\hurricane\util - - - 源文件\hurricane\service - - - 源文件\hurricane\service - - - 源文件\hurricane\tool - - - 源文件\hurricane\util - - - 源文件\hurricane\service - - - 源文件\hurricane\task - - - 源文件\hurricane\spout - - - 源文件\hurricane\bolt - - - 源文件\sample\wordcount - - - 源文件\sample\wordcount - - - 源文件\sample\wordcount - - - 源文件\hurricane\collector - - - 源文件\hurricane\topology - - - 源文件\sample\wordcount - - - 源文件\hurricane\topology - - - \ No newline at end of file diff --git a/src/hurricane/base/DataPackage.cpp b/src/hurricane/base/DataPackage.cpp index 0456161..e902ac2 100755 --- a/src/hurricane/base/DataPackage.cpp +++ b/src/hurricane/base/DataPackage.cpp @@ -24,8 +24,13 @@ namespace hurricane { class AllWritables { public: AllWritables() { - _writables.insert({ 0, std::shared_ptr(new IntWritable) }); - _writables.insert({ 3, std::shared_ptr(new StringWritable) }); + _writables.insert({ 0, std::shared_ptr(new Int32Writable) }); + _writables.insert({ 1, std::shared_ptr(new Int64Writable) }); + _writables.insert({ 2, std::shared_ptr(new UInt32Writable) }); + _writables.insert({ 3, std::shared_ptr(new UInt64Writable) }); + _writables.insert({ 4, std::shared_ptr(new BooleanWritable) }); + _writables.insert({ 5, std::shared_ptr(new FloatWritable) }); + _writables.insert({ 6, std::shared_ptr(new StringWritable) }); } std::map> _writables; @@ -39,13 +44,21 @@ namespace hurricane { } std::map Variant::TypeCodes = { - { Variant::Type::Integer, 0 }, - { Variant::Type::String, 3 } + { Variant::Type::Int32, 0 }, + { Variant::Type::Int64, 1 }, + { Variant::Type::UInt32, 2 }, + { Variant::Type::UInt64, 3 }, + { Variant::Type::Boolean, 4 }, + { Variant::Type::Float, 5 }, + { Variant::Type::String, 6 } }; std::map < Variant::Type, std::string > Variant::TypeNames = { { Variant::Type::Invalid, "Invalid" }, - { Variant::Type::Integer, "Integer" }, + { Variant::Type::Int32, "Int32" }, + { Variant::Type::Int64, "Int64" }, + { Variant::Type::UInt32, "UInt32" }, + { Variant::Type::UInt64, "UInt64" }, { Variant::Type::Boolean, "Boolean" }, { Variant::Type::Float, "Float" }, { Variant::Type::String, "String" } diff --git a/src/hurricane/base/Library.cpp b/src/hurricane/base/Library.cpp new file mode 100644 index 0000000..bd82ea9 --- /dev/null +++ b/src/hurricane/base/Library.cpp @@ -0,0 +1,43 @@ +#include "hurricane/base/Library.h" + +#ifndef WIN32 +#include +#else +#include + +extern HMODULE LibrarayHandle; +#endif + +#include +#include + +using namespace std; + +std::string GetLibraryPath() { +#ifndef WIN32 + Dl_info dllInfo; + + int ret = dladdr((void*)(GetLibraryPath), &dllInfo); + if ( !ret ) { + std::cout << "[ERROR] Get Library Path failed" << std::endl; + exit(EXIT_FAILURE); + } + + std::string libraryFilePath = dllInfo.dli_fname; + std::string::size_type pos = libraryFilePath.rfind('/'); +#else + const int MAX_FILE_NAME = 1000; + + char moduleFileName[MAX_FILE_NAME]; + GetModuleFileName(LibrarayHandle, moduleFileName, MAX_FILE_NAME); + + std::string libraryFilePath = moduleFileName; + std::string::size_type pos 
= libraryFilePath.rfind('\\'); +#endif + + if ( pos == std::string::npos ) { + return "."; + } + + return libraryFilePath.substr(0, pos); +} diff --git a/src/hurricane/base/Values.cpp b/src/hurricane/base/Values.cpp new file mode 100644 index 0000000..040434a --- /dev/null +++ b/src/hurricane/base/Values.cpp @@ -0,0 +1,21 @@ +#include "hurricane/base/Values.h" + +namespace hurricane { +namespace base { + +void Tuple::Serialize(Variants& variants) const +{ + base::Variant::Serialize(variants, _sourceTask); + base::Variant::Serialize(variants, _destTask); + base::Variant::Serialize(variants, _values); +} + +void Tuple::Deserialize(Variants::const_iterator& it) +{ + base::Variant::Deserialize(it, _sourceTask); + base::Variant::Deserialize(it, _destTask); + base::Variant::Deserialize(it, _values); +} + +} +} diff --git a/src/hurricane/bolt/BoltDeclarer.cpp b/src/hurricane/bolt/BoltDeclarer.cpp index e8bf300..914ebd2 100755 --- a/src/hurricane/bolt/BoltDeclarer.cpp +++ b/src/hurricane/bolt/BoltDeclarer.cpp @@ -7,6 +7,12 @@ namespace hurricane { _bolt(bolt){ SetType(hurricane::task::TaskDeclarer::Type::Bolt); SetTaskName(boltName); + + _fields = _bolt->DeclareFields(); + int fieldIndex = 0; + for ( const std::string& field : _fields ) { + _fieldsMap.insert({field, fieldIndex}); + } } } -} \ No newline at end of file +} diff --git a/src/hurricane/collector/OutputCollector.cpp b/src/hurricane/collector/OutputCollector.cpp index 655785c..91d7c85 100755 --- a/src/hurricane/collector/OutputCollector.cpp +++ b/src/hurricane/collector/OutputCollector.cpp @@ -1,13 +1,15 @@ #include "hurricane/collector/OutputCollector.h" +#include "hurricane/collector/OutputQueue.h" #include "hurricane/base/Values.h" #include namespace hurricane { namespace collector { - void OutputCollector::Emit(const hurricane::base::Tuple& tuple) { - std::cout << "Output Collector" << std::endl; - std::cout << tuple.GetSize() << std::endl; + void OutputCollector::Emit(const hurricane::base::Tuple& tuple) { + if ( _taskIndex != -1 ) { + _queue->Push(new OutputItem(_taskIndex, tuple, _taskName)); + } } } -} \ No newline at end of file +} diff --git a/src/hurricane/collector/OutputDispatcher.cpp b/src/hurricane/collector/OutputDispatcher.cpp new file mode 100644 index 0000000..b51e94c --- /dev/null +++ b/src/hurricane/collector/OutputDispatcher.cpp @@ -0,0 +1,171 @@ +#include "hurricane/collector/OutputDispatcher.h" +#include "hurricane/collector/OutputQueue.h" +#include "hurricane/collector/TaskQueue.h" +#include "hurricane/util/StringUtil.h" +#include "hurricane/message/CommandClient.h" +#include "hurricane/message/Command.h" +#include "hurricane/util/NetConnector.h" + +namespace hurricane { +namespace collector { + +void OutputDispatcher::SetTaskInfos(const std::vector& taskInfos) +{ + _taskInfos = taskInfos; +} + +void OutputDispatcher::SetNimbusClient(message::CommandClient* nimbusClient) +{ + _nimbusClient.reset(nimbusClient); +} + +void OutputDispatcher::Start() +{ + _thread = std::thread(&OutputDispatcher::MainThread, this); +} + +void OutputDispatcher::MainThread() +{ + while ( true ) { + OutputItem* outputItem = nullptr; + if ( _queue->Pop(outputItem) ) { + int taskIndex = outputItem->GetTaskIndex(); + task::TaskInfo taskInfo = _taskInfos[taskIndex]; + for ( const task::PathInfo& pathInfo : taskInfo.GetPaths() ) { + ProcessPath(taskInfo, pathInfo, outputItem); + } + + delete outputItem; + outputItem = nullptr; + } + } +} + +bool OutputDispatcher::ProcessPath(const task::TaskInfo& taskInfo, const task::PathInfo& path, + 
OutputItem* outputItem) +{ + std::string sourceTaskName = taskInfo.GetTaskName(); + std::string destTaskName = path.GetTaskName(); + + outputItem->GetTuple().SetSourceTask(sourceTaskName); + outputItem->GetTuple().SetDestTask(destTaskName); + + if ( path.GetGroupMethod() == task::PathInfo::GroupMethod::Global ) { + const task::ExecutorPosition& executorPosition = path.GetDestinationExecutors()[0]; + + SendTupleTo(outputItem, executorPosition); + } + else if ( path.GetGroupMethod() == task::PathInfo::GroupMethod::Random ) { + int destCount = path.GetDestinationExecutors().size(); + int destIndex = rand() % destCount; + + const task::ExecutorPosition& executorPosition = path.GetDestinationExecutors()[destIndex]; + + SendTupleTo(outputItem, executorPosition); + } + else if ( path.GetGroupMethod() == task::PathInfo::GroupMethod::Field ) { + TaskPathName taskPathName = { sourceTaskName, destTaskName }; + + auto taskPairIter = _fieldsDestinations.find(taskPathName); + if ( taskPairIter == _fieldsDestinations.end() ) { + _fieldsDestinations.insert({ taskPathName, std::map() }); + taskPairIter = _fieldsDestinations.find(taskPathName); + } + + std::map& destinations = taskPairIter->second; + int fieldIndex = this->_taskFieldsMap[sourceTaskName]->at(path.GetFieldName()); + std::string fieldValue = outputItem->GetTuple()[fieldIndex].GetStringValue(); + auto fieldDestIter = destinations.find(fieldValue); + + if ( fieldDestIter == destinations.end() ) { + AskField(taskPathName, fieldValue, + [taskPathName, outputItem, fieldValue, this](task::ExecutorPosition executorPosition) -> void { + _fieldsDestinations[taskPathName].insert({fieldValue, executorPosition}); + SendTupleTo(outputItem, executorPosition); + }); + } + else { + const task::ExecutorPosition& executorPosition = fieldDestIter->second; + SendTupleTo(outputItem, executorPosition); + } + } +} + +void OutputDispatcher::SendTupleTo(OutputItem* outputItem, const task::ExecutorPosition& executorPosition) +{ + hurricane::base::NetAddress destAddress = executorPosition.GetSupervisor(); + std::string destIdentifier = destAddress.GetHost() + ":" + Int2String(destAddress.GetPort()); + std::string selfIdentifier = _selfAddress.GetHost() + ":" + Int2String(_selfAddress.GetPort()); + + if ( destIdentifier == selfIdentifier ) { + int executorIndex = executorPosition.GetExecutorIndex(); + int boltIndex = executorIndex - _selfSpoutCount; + + std::shared_ptr taskQueue = _selfTasks[boltIndex]; + TaskItem* taskItem = new TaskItem(outputItem->GetTaskIndex(), outputItem->GetTuple()); + taskQueue->Push(taskItem); + } + else { + std::map::iterator commandClientPair = + _commandClients.find(destIdentifier); + if ( commandClientPair == _commandClients.end() ) { + util::NetConnector* connector = new util::NetConnector(destAddress); + message::CommandClient* commandClient = new message::CommandClient(connector); + _commandClients.insert({destIdentifier, commandClient}); + + commandClientPair = _commandClients.find(destIdentifier); + } + + message::CommandClient* commandClient = commandClientPair->second; + + commandClient->GetConnector()->Connect([ + outputItem, commandClient, destIdentifier, executorPosition, this] { + hurricane::message::Command command(hurricane::message::Command::Type::SendTuple); + + base::Variants commandVariants; + _selfAddress.Serialize(commandVariants); + executorPosition.Serialize(commandVariants); + outputItem->GetTuple().Serialize(commandVariants); + + command.AddArguments(commandVariants); + + try { + commandClient->SendCommand(command, + 
[destIdentifier, this](const hurricane::message::Response& response) -> void { + if ( response.GetStatus() == hurricane::message::Response::Status::Successful ) { + } + else { + std::cout << "Send to " << destIdentifier << " failed." << std::endl; + } + }); + } + catch ( util::SocketException& e ) { +// std::cout << e.what() << std::endl; + } + }); + } +} + +void OutputDispatcher::AskField(TaskPathName taskPathName, + const std::string& fieldValue, OutputDispatcher::AskFieldCallback callback) +{ + _nimbusClient->Connect([taskPathName, fieldValue, callback, this]() { + hurricane::message::Command command(hurricane::message::Command::Type::AskField); + command.AddArgument(taskPathName.first); + command.AddArgument(taskPathName.second); + command.AddArgument(fieldValue); + + _nimbusClient->SendCommand(command, [callback](const hurricane::message::Response& response) { + task::ExecutorPosition destination; + const base::Variants respArguments = response.GetArguments(); + + base::Variants::const_iterator argIter = respArguments.cbegin(); + destination.Deserialize(argIter); + + callback(destination); + }); + }); +} + +} +} diff --git a/src/hurricane/message/Command.cpp b/src/hurricane/message/Command.cpp index 7c472a0..c8db121 100755 --- a/src/hurricane/message/Command.cpp +++ b/src/hurricane/message/Command.cpp @@ -13,7 +13,7 @@ namespace hurricane { } _arguments = dataPackage.GetVariants(); - _type = _arguments[0].GetIntValue(); + _type = _arguments[0].GetInt32Value(); _arguments.erase(_arguments.begin()); } @@ -37,7 +37,7 @@ namespace hurricane { } _arguments = dataPackage.GetVariants(); - _status = _arguments[0].GetIntValue(); + _status = _arguments[0].GetInt32Value(); _arguments.erase(_arguments.begin()); } @@ -52,4 +52,4 @@ namespace hurricane { return dataPackage.Serialize(); } } -} \ No newline at end of file +} diff --git a/src/hurricane/message/CommandClient.cpp b/src/hurricane/message/CommandClient.cpp index c9a96a9..bc9c3bc 100755 --- a/src/hurricane/message/CommandClient.cpp +++ b/src/hurricane/message/CommandClient.cpp @@ -10,13 +10,17 @@ namespace hurricane { if ( _connector ) { delete _connector; _connector = nullptr; - } - } + } + } + + void CommandClient::Connect(CommandClient::ConnectCallback callback) + { + this->_connector->Connect(callback); + } void CommandClient::SendCommand(const Command& command, SendCommandCallback callback) { try { - hurricane::base::ByteArray commandBytes = command.Serialize(); - std::cout << commandBytes.size() << std::endl; + hurricane::base::ByteArray commandBytes = command.Serialize(); _connector->SendAndReceive(commandBytes.data(), commandBytes.size(), [callback](char* resultBuffer, int32_t readSize) { hurricane::message::Response response; @@ -32,4 +36,4 @@ namespace hurricane { } } } -} \ No newline at end of file +} diff --git a/src/hurricane/service/Nimbus.cpp b/src/hurricane/service/Nimbus.cpp index 2029ace..94e4965 100755 --- a/src/hurricane/service/Nimbus.cpp +++ b/src/hurricane/service/Nimbus.cpp @@ -1,91 +1,527 @@ #include "hurricane/service/Nimbus.h" +#include "hurricane/util/NetConnector.h" +#include "hurricane/message/CommandClient.h" #include "hurricane/util/Configuration.h" #include "hurricane/topology/Topology.h" +#include "hurricane/topology/TopologyLoader.h" #include "sample/wordcount/WordCountTopology.h" #include +#include +#include +#include +#include +#include namespace hurricane { namespace service { Nimbus::Nimbus(const hurricane::base::NetAddress& host) : CommandServer(new hurricane::util::NetListener(host)), - 
_nimbusHost(host) { + _nimbusHost(host), + _supervisorCount(0) { OnConnection(std::bind(&Nimbus::OnConnect, this, std::placeholders::_1)); OnCommand(hurricane::message::Command::Type::Join, this, &Nimbus::OnJoin); + OnCommand(hurricane::message::Command::Type::AskField, this, &Nimbus::OnAskField); } Nimbus::Nimbus(const hurricane::util::Configuration& configuration) : Nimbus(hurricane::base::NetAddress( configuration.GetProperty("nimbus.host"), - configuration.GetIntegerProperty("nimbus.port"))) { + configuration.GetIntegerProperty("nimbus.port"))) { + _supervisorCount = configuration.GetIntegerProperty("nimbus.supervisor.count"); + _configuration.reset(new hurricane::util::Configuration(configuration)); + + std::cout << "Need supervisors: " << _supervisorCount << std::endl; + } + + void Nimbus::OnConnect(SupervisorContext* context) { } - void Nimbus::OnConnect(SupervisorContext* context) { - } - - void Nimbus::OnJoin(SupervisorContext* context, const hurricane::message::Command& command, - hurricane::message::CommandServer::Responser responser) { - std::string joinerType = command.GetArgument(0).GetStringValue(); - - std::cout << "Join node: " << joinerType << std::endl; - - SupervisorContext supervisorContext = SupervisorContext::FromVariants(command.GetArguments().cbegin() + 1); + void Nimbus::OnJoin(SupervisorContext* context, const hurricane::message::Command& command, + hurricane::message::CommandServer::Responser responser) { + std::string joinerType = command.GetArgument(0).GetStringValue(); + std::string supervisorHost = command.GetArgument(1).GetStringValue(); + int supervisorPort = command.GetArgument(2).GetInt32Value(); + + std::cout << "Join node: " << joinerType << std::endl; + + SupervisorContext supervisorContext; + base::Variants::const_iterator currentIterator = command.GetArguments().cbegin() + 3; + supervisorContext.Deserialize(currentIterator); std::cout << "Supervisor name: " << supervisorContext.GetId() << std::endl; + std::cout << "Host: " << supervisorHost << std::endl; + std::cout << "Port: " << supervisorPort << std::endl; std::cout << "Spout count: " << supervisorContext.GetSpoutCount() << std::endl; std::cout << "Bolt count: " << supervisorContext.GetBoltCount() << std::endl; std::cout << "Task info count: " << supervisorContext.GetTaskInfos().size() << std::endl; std::cout << "Free spout count: " << supervisorContext.GetFreeSpouts().size() << std::endl; std::cout << "Free bolt count: " << supervisorContext.GetFreeBolts().size() << std::endl; std::cout << "Busy spout count: " << supervisorContext.GetBusySpouts().size() << std::endl; - std::cout << "Busy bolt count: " << supervisorContext.GetBusyBolts().size() << std::endl; - - _supervisors.push_back(supervisorContext); - - if ( _supervisors.size() == 2 ) { - topology::Topology* topology = GetTopology(); - SubmitTopology(topology); - } - - hurricane::message::Response response(hurricane::message::Response::Status::Successful); - response.AddArgument({ "nimbus" }); - + std::cout << "Busy bolt count: " << supervisorContext.GetBusyBolts().size() << std::endl; + + supervisorContext.SetNetAddress(hurricane::base::NetAddress( + supervisorHost, supervisorPort)); + supervisorContext.PrepareTaskInfos(); + _supervisors.push_back(supervisorContext); + + // Response + hurricane::message::Response response(hurricane::message::Response::Status::Successful); + response.AddArgument({ "nimbus" }); + responser(response); - } - void Nimbus::SubmitTopology(hurricane::topology::Topology* topology) { - std::cout << "Submit topology: 
" << topology->GetName() << std::endl; + // Initialize command clients + hurricane::base::NetAddress supervisorAddress(supervisorHost, + supervisorPort); + hurricane::util::NetConnector* supervisorConnector = + new hurricane::util::NetConnector(supervisorAddress); + hurricane::message::CommandClient* supervisorCommandClient = + new hurricane::message::CommandClient(supervisorConnector); + + _supervisorClients.insert({supervisorContext.GetId(), + std::shared_ptr(supervisorCommandClient)}); + + SendHeartbeat(supervisorContext.GetId()); + + // Initialize topology + if ( _supervisors.size() == _supervisorCount ) { + std::string topologyName = _configuration->GetProperty("topology.name"); + hurricane::topology::Topology* topology = + hurricane::topology::TopologyLoader::GetInstance().GetTopology(topologyName).get(); + SubmitTopology(topology); + } + } + + void Nimbus::OnAskField(SupervisorContext* context, const hurricane::message::Command& command, + hurricane::message::CommandServer::Responser responser) + { + std::string sourceTaskName = command.GetArgument(0).GetStringValue(); + std::string destTaskName = command.GetArgument(1).GetStringValue(); + TaskPathName taskPathName = { sourceTaskName, destTaskName }; + std::string fieldValue = command.GetArgument(2).GetStringValue(); - const std::map& spoutDeclarers = topology->GetSpoutDeclarers(); - const std::map& boltDeclarers = topology->GetBoltDeclarers(); + auto taskPairIter = _fieldsDestinations.find(taskPathName); + if ( taskPairIter == _fieldsDestinations.end() ) { + _fieldsDestinations.insert({ taskPathName, std::map() }); + taskPairIter = _fieldsDestinations.find(taskPathName); + } + + std::map& destinations = taskPairIter->second; + auto destinationPairIter = destinations.find(fieldValue); + if ( destinationPairIter == destinations.end() ) { + std::vector& candidates = _fieldsCandidates[taskPathName]; + int positionIndex = rand() % candidates.size(); + + destinations.insert({fieldValue, candidates[positionIndex]}); + destinationPairIter = destinations.find(fieldValue); + } + + task::ExecutorPosition destination = destinationPairIter->second; + base::Variants destinationVariants; + destination.Serialize(destinationVariants); + + hurricane::message::Response response(hurricane::message::Response::Status::Successful); + response.AddArguments(destinationVariants); + + responser(response); + } + std::list Nimbus::GetAllSpoutTasks( + const std::map& spoutDeclarers, + hurricane::topology::Topology* topology) + { + std::list originSpoutTasks; for ( const auto& spoutPair : spoutDeclarers ) { hurricane::spout::SpoutDeclarer spoutDeclarer = spoutPair.second; std::cout << "Spout " << spoutDeclarer.GetTaskName() << std::endl; std::cout << "ParallismHint: " << spoutDeclarer.GetParallismHint() << std::endl; - - // Allocate spout tasks - for ( const SupervisorContext& supervisorContext : _supervisors ) { - if ( supervisorContext.GetFreeSpouts().size() ) { + int parallismHint = spoutDeclarer.GetParallismHint(); + for ( int taskIndex = 0; taskIndex != parallismHint; ++ taskIndex ) { + hurricane::task::TaskInfo taskInfo; + taskInfo.SetTopologyName(topology->GetName()); + taskInfo.SetTaskName(spoutDeclarer.GetTaskName()); + + originSpoutTasks.push_back(taskInfo); + } + } + + return originSpoutTasks; + } + + std::map> + Nimbus::AllocateSpoutTasks(std::list& originSpoutTasks) + { + std::map> nameToSpoutTasks; + // Allocate task for every supervisor + for ( SupervisorContext& supervisorContext : _supervisors ) { + if ( !originSpoutTasks.size() ) { + break; + } 
+ + while ( true ) { + if ( !originSpoutTasks.size() ) { + break; + } + + // If useNextSpout return -1, the spout slots is used up + int spoutIndex = supervisorContext.useNextSpout(); + if ( spoutIndex == -1 ) { + break; + } + + // Put the spout task into spout slot + hurricane::task::TaskInfo taskInfo = originSpoutTasks.front(); + taskInfo.SetSupervisorContext(&supervisorContext); + taskInfo.SetExecutorIndex(supervisorContext.GetExecutorIndex( + SupervisorContext::ExecutorType::Spout, spoutIndex)); + originSpoutTasks.pop_front(); + supervisorContext.SetSpoutTaskInfo(spoutIndex, taskInfo); + // Insert the spout task pointer into mapper + std::string taskName = taskInfo.GetTaskName(); + auto spoutTasksPair = nameToSpoutTasks.find(taskName); + if ( spoutTasksPair == nameToSpoutTasks.end() ) { + nameToSpoutTasks.insert({taskName, std::vector()}); + spoutTasksPair = nameToSpoutTasks.find(taskName); } + + spoutTasksPair->second.push_back(&(supervisorContext.GetSpoutTaskInfo(spoutIndex))); } } + return nameToSpoutTasks; + } + + std::list Nimbus::GetAllBoltTasks(hurricane::topology::Topology* topology, + const std::map& boltDeclarers) + { + std::list originBoltTasks; for ( const auto& boltPair : boltDeclarers ) { hurricane::bolt::BoltDeclarer boltDeclarer = boltPair.second; std::cout << "Bolt " << boltDeclarer.GetTaskName() << std::endl; std::cout << "Source: " << boltDeclarer.GetSourceTaskName() << std::endl; std::cout << "ParallismHint: " << boltDeclarer.GetParallismHint() << std::endl; - // Allocate bolt tasks - for ( const SupervisorContext& supervisorContext : _supervisors ) { - if ( supervisorContext.GetFreeBolts().size() ) { + int parallismHint = boltDeclarer.GetParallismHint(); + for ( int taskIndex = 0; taskIndex != parallismHint; ++ taskIndex ) { + hurricane::task::TaskInfo taskInfo; + taskInfo.SetTopologyName(topology->GetName()); + taskInfo.SetTaskName(boltDeclarer.GetTaskName()); + + originBoltTasks.push_back(taskInfo); + } + } + + return originBoltTasks; + } + + std::map> + Nimbus::AllocateBoltTasks(std::list& originBoltTasks) + { + std::map> nameToBoltTasks; + // Allocate bolt tasks + for ( SupervisorContext& supervisorContext : _supervisors ) { + if ( !originBoltTasks.size() ) { + break; + } + + while ( true ) { + if ( !originBoltTasks.size() ) { + break; + } + + // If useNextBolt return -1, the bolt slots is used up + int boltIndex = supervisorContext.useNextBolt(); + if ( boltIndex == -1 ) { + break; + } + + // Put the bolt task into bolt slot + hurricane::task::TaskInfo taskInfo = originBoltTasks.front(); + taskInfo.SetSupervisorContext(&supervisorContext); + taskInfo.SetExecutorIndex(supervisorContext.GetExecutorIndex( + SupervisorContext::ExecutorType::Bolt, boltIndex)); + originBoltTasks.pop_front(); + supervisorContext.SetBoltTaskInfo(boltIndex, taskInfo); + // Insert the bolt task pointer into mapper + std::string taskName = taskInfo.GetTaskName(); + auto boltTasksPair = nameToBoltTasks.find(taskName); + if ( boltTasksPair == nameToBoltTasks.end() ) { + nameToBoltTasks.insert({taskName, std::vector()}); + boltTasksPair = nameToBoltTasks.find(taskName); } + + boltTasksPair->second.push_back(&(supervisorContext.GetBoltTaskInfo(boltIndex))); + } + } + + return nameToBoltTasks; + } + + std::vector Nimbus::FindTask( + const std::map>& nameToBoltTasks, + const std::map>& nameToSpoutTasks, + const std::string& sourceTaskName) + { + auto spoutTaskPair = nameToSpoutTasks.find(sourceTaskName); + if ( spoutTaskPair != nameToSpoutTasks.end() ) { + return spoutTaskPair->second; + } + + 
auto boltTaskPair = nameToBoltTasks.find(sourceTaskName); + if ( boltTaskPair != nameToBoltTasks.end() ) { + return boltTaskPair->second; + } + + return std::vector(); + } + + std::vector Nimbus::FindTask( + const std::map>& nameToBoltTasks, + const std::string& sourceTaskName) + { + auto boltTaskPair = nameToBoltTasks.find(sourceTaskName); + if ( boltTaskPair != nameToBoltTasks.end() ) { + return boltTaskPair->second; + } + + return std::vector(); + } + + void Nimbus::ShowTaskInfos(const std::vector& taskInfos) + { + for ( const hurricane::task::TaskInfo& taskInfo : taskInfos ) { + if ( !taskInfo.GetSupervisorContext() ) { + continue; + } + + std::cout << " Supervisor: " << taskInfo.GetSupervisorContext()->GetId() << std::endl; + std::cout << " Exectuor index: " << taskInfo.GetExecutorIndex() << std::endl; + std::cout << " Paths: " << std::endl; + const std::list& paths = taskInfo.GetPaths(); + + for ( const hurricane::task::PathInfo& path : paths ) { + std::cout << " Path: " << std::endl; + int groupMethod = path.GetGroupMethod(); + std::cout << " Group method: " << groupMethod << std::endl; + if ( path.GetGroupMethod() == hurricane::task::PathInfo::GroupMethod::Global) { + std::cout << " Destination host: " << + path.GetDestinationExecutors()[0].GetSupervisor().GetHost() << std::endl; + std::cout << " Destination port: " << + path.GetDestinationExecutors()[0].GetSupervisor().GetPort() << std::endl; + std::cout << " Destination executor index: " << + path.GetDestinationExecutors()[0].GetExecutorIndex() << std::endl; + } + } + } + } + + void Nimbus::SyncWithSupervisors() + { + for ( SupervisorContext& supervisorContext : _supervisors ) { + std::string supervisorId = supervisorContext.GetId(); + std::cout << "Sync meta data with supervisr: " << supervisorId; + std::shared_ptr supervisorClient = + _supervisorClients[supervisorId]; + + supervisorClient->GetConnector()->Connect([supervisorId, supervisorClient, &supervisorContext, this] { + hurricane::message::Command command(hurricane::message::Command::Type::SyncMetadata); + + // 1 means Nimbus to Supervisor + // 2 means Supervisor to Nimbus + command.AddArgument({ 1 }); + + base::Variants supervisorContextVariants; + supervisorContext.Serialize(supervisorContextVariants); + command.AddArguments(supervisorContextVariants); + supervisorClient->SendCommand(command, + [supervisorId, this](const hurricane::message::Response& response) -> void { + if ( response.GetStatus() == hurricane::message::Response::Status::Successful ) { + std::cout << "Sync with " << supervisorId << " successfully." << std::endl; + } + else { + std::cout << "Sync with " << supervisorId << " failed." 
<< std::endl; + } + }); + }); + } + } + + void Nimbus::ShowSupervisorTaskInfos() + { + std::cout << std::endl << "================ Allocate result ================" << std::endl; + for ( SupervisorContext& supervisorContext : _supervisors ) { + std::cout << supervisorContext.GetId() << std::endl; + std::cout << " Host: " << supervisorContext.GetNetAddress().GetHost() << std::endl; + std::cout << " Port: " << supervisorContext.GetNetAddress().GetPort() << std::endl; + + std::cout << " Tasks: " << std::endl; + const std::vector& taskInfos = + supervisorContext.GetTaskInfos(); + ShowTaskInfos(taskInfos); + } + } + + void Nimbus::CalculateTaskPaths( + const std::map>& nameToBoltTasks, + const std::map& boltDeclarers, + const std::map>& nameToSpoutTasks) + { + for ( const auto& boltPair : boltDeclarers ) { + hurricane::bolt::BoltDeclarer boltDeclarer = boltPair.second; + // No setted source task + if ( boltDeclarer.GetSourceTaskName().empty() ) { + continue; + } + + std::string sourceTaskName = boltDeclarer.GetSourceTaskName(); + std::vector sourceTasks = + FindTask(nameToBoltTasks, nameToSpoutTasks, sourceTaskName); + + std::string destTaskName = boltDeclarer.GetTaskName(); + std::vector destTasks = + FindTask(nameToBoltTasks, destTaskName); + + std::vector destExecutorPositions; + for ( hurricane::task::TaskInfo* destTask : destTasks ) { + destExecutorPositions.push_back(task::ExecutorPosition( + destTask->GetSupervisorContext()->GetNetAddress(), + destTask->GetExecutorIndex() + )); + } + + if ( boltDeclarer.GetGroupMethod() == task::TaskDeclarer::GroupMethod::Global ) { + for ( hurricane::task::TaskInfo* sourceTask : sourceTasks ) { + int destTaskIndex = rand() % destTasks.size(); + hurricane::task::TaskInfo* destTask = destTasks[destTaskIndex]; + + hurricane::task::PathInfo pathInfo; + pathInfo.SetGroupMethod(hurricane::task::PathInfo::GroupMethod::Global); + pathInfo.SetDestinationTask(destTask->GetTaskName()); + pathInfo.SetDestinationExecutors({task::ExecutorPosition( + destTask->GetSupervisorContext()->GetNetAddress(), + destTask->GetExecutorIndex() + )}); + + sourceTask->AddPath(pathInfo); + } + } + else if ( boltDeclarer.GetGroupMethod() == task::TaskDeclarer::GroupMethod::Field ) { + // Resolve the destination by field when run task. + for ( hurricane::task::TaskInfo* sourceTask : sourceTasks ) { + hurricane::task::PathInfo pathInfo; + pathInfo.SetGroupMethod(hurricane::task::PathInfo::GroupMethod::Field); + pathInfo.SetDestinationTask(destTaskName); + pathInfo.SetFieldName(boltDeclarer.GetGroupField()); + + sourceTask->AddPath(pathInfo); + } + + TaskPathName taskPathName = { sourceTaskName, destTaskName }; + _fieldsCandidates[taskPathName] = destExecutorPositions; + } + else if ( boltDeclarer.GetGroupMethod() == task::TaskDeclarer::GroupMethod::Random ) { + // Resolve the destination by field when run task. 
+ for ( hurricane::task::TaskInfo* sourceTask : sourceTasks ) { + hurricane::task::PathInfo pathInfo; + pathInfo.SetGroupMethod(hurricane::task::PathInfo::GroupMethod::Random); + pathInfo.SetDestinationTask(destTaskName); + pathInfo.SetDestinationExecutors(destExecutorPositions); + + sourceTask->AddPath(pathInfo); + } + } + else { + std::cerr << "Unsupported group method occured" << std::endl; + exit(EXIT_FAILURE); + } + } + } + + void Nimbus::ShowSupervisorMetadata() + { + std::cout << std::endl << "================ Supervisor metadata ================" << std::endl; + for ( SupervisorContext& supervisorContext : _supervisors ) { + std::cout << "Supervisor name: " << supervisorContext.GetId() << std::endl; + std::cout << " Spout count: " << supervisorContext.GetSpoutCount() << std::endl; + std::cout << " Bolt count: " << supervisorContext.GetBoltCount() << std::endl; + std::cout << " Task info count: " << supervisorContext.GetTaskInfos().size() << std::endl; + std::cout << " Free spout count: " << supervisorContext.GetFreeSpouts().size() << std::endl; + std::cout << " Free bolt count: " << supervisorContext.GetFreeBolts().size() << std::endl; + std::cout << " Busy spout count: " << supervisorContext.GetBusySpouts().size() << std::endl; + std::cout << " Busy bolt count: " << supervisorContext.GetBusyBolts().size() << std::endl; + } + } + + void Nimbus::SubmitTopology(hurricane::topology::Topology* topology) { + std::cout << "Submit topology: " << topology->GetName() << std::endl; + + const std::map& spoutDeclarers = + topology->GetSpoutDeclarers(); + const std::map& boltDeclarers = + topology->GetBoltDeclarers(); + + // Allocate task and send to supervisor + std::list originSpoutTasks = + GetAllSpoutTasks(spoutDeclarers, topology); + std::map> nameToSpoutTasks = + AllocateSpoutTasks(originSpoutTasks); + + std::list originBoltTasks = + GetAllBoltTasks(topology, boltDeclarers); + std::map> nameToBoltTasks = + AllocateBoltTasks(originBoltTasks); + + CalculateTaskPaths(nameToBoltTasks, boltDeclarers, nameToSpoutTasks); + ShowSupervisorTaskInfos(); + ShowSupervisorMetadata(); + SyncWithSupervisors(); + } + + const int MAX_HEARTBEAT_FAILED_TIMES = 5; + void Nimbus::SendHeartbeat(const std::string supervisorId) + { + std::cout << "Sending heartbeat to " << supervisorId << std::endl; + + int sendTimes = 0; + while ( true ) { + try { + std::shared_ptr commandClient = + _supervisorClients.at(supervisorId); + + commandClient->GetConnector()->Connect([commandClient, supervisorId, this] { + std::cout << "Connected to " << supervisorId << std::endl; + hurricane::message::Command command(hurricane::message::Command::Type::Heartbeat); + + commandClient->SendCommand(command, + [supervisorId, this](const hurricane::message::Response& response) -> void { + if ( response.GetStatus() == hurricane::message::Response::Status::Successful ) { + std::cout << supervisorId << " alived." << std::endl; + } + else { + std::cout << supervisorId << " dead." 
<< std::endl; + } + }); + }); + + break; + } + catch ( const std::exception& e ) { + std::cerr << "Error in sending heartbeat to " << supervisorId << std::endl; + std::cerr << e.what() << std::endl; + + sendTimes ++; + std::cout << "Sendtimes: " << sendTimes << std::endl; + if ( sendTimes >= MAX_HEARTBEAT_FAILED_TIMES ) { + break; + } + + std::this_thread::sleep_for(std::chrono::milliseconds(1000)); } } } } -} \ No newline at end of file +} diff --git a/src/hurricane/service/Supervisor.cpp b/src/hurricane/service/Supervisor.cpp index 032c7e2..4b9c3d7 100755 --- a/src/hurricane/service/Supervisor.cpp +++ b/src/hurricane/service/Supervisor.cpp @@ -2,61 +2,334 @@ #include "hurricane/message/CommandClient.h" #include "hurricane/util/NetConnector.h" #include "hurricane/util/Configuration.h" +#include "hurricane/topology/Topology.h" +#include "hurricane/topology/TopologyLoader.h" +#include "hurricane/task/SpoutExecutor.h" +#include "hurricane/task/BoltExecutor.h" +#include "hurricane/spout/ISpout.h" +#include "hurricane/bolt/IBolt.h" +#include "hurricane/collector/OutputCollector.h" +#include "hurricane/collector/OutputQueue.h" +#include "hurricane/collector/TaskQueue.h" namespace hurricane { - namespace service { - Supervisor::Supervisor(const hurricane::util::Configuration& configuration) : + namespace service { + + Supervisor::Supervisor(const hurricane::util::Configuration& configuration) : CommandServer(new hurricane::util::NetListener(hurricane::base::NetAddress( configuration.GetProperty("supervisor.host"), - configuration.GetIntegerProperty("supervisor.port")))) { - _supervisorConfiguration.reset(new hurricane::util::Configuration(configuration)); - - hurricane::base::NetAddress nimbusAddress(configuration.GetProperty("nimbus.host"), - configuration.GetIntegerProperty("nimbus.port")); - _nimbusConnector = new hurricane::util::NetConnector(nimbusAddress); - _nimbusClient = new hurricane::message::CommandClient(_nimbusConnector); - - _name = configuration.GetProperty("supervisor.name"); - InitSelfContext(); - OnConnection(std::bind(&Supervisor::OnConnect, this, std::placeholders::_1)); - } - - void Supervisor::OnConnect(SupervisorContext* context) { - } - - void Supervisor::JoinNimbus(JoinNimbusCallback callback) { - hurricane::message::CommandClient* commandClient = _nimbusClient; - - _nimbusConnector->Connect([commandClient, callback, this]() { - hurricane::message::Command command(hurricane::message::Command::Type::Join); - command.AddArgument({ "supervisor" }); - std::vector context = _selfContext->ToVariants(); - command.AddArguments(context); - - commandClient->SendCommand(command, [callback](const hurricane::message::Response& response) { - callback(response); - }); - }); - } - - void Supervisor::InitSelfContext() { - this->_selfContext.reset(new SupervisorContext); - _selfContext->SetId(_name); - _selfContext->SetSpoutCount(_supervisorConfiguration->GetIntegerProperty("supervisor.spout.num")); - _selfContext->SetBoltCount(_supervisorConfiguration->GetIntegerProperty("supervisor.bolt.num")); - _selfContext->SetTaskInfos(std::vector(_selfContext->GetSpoutCount() + _selfContext->GetBoltCount())); - - std::set freeSpouts; - for ( int spoutIndex = 0; spoutIndex != _selfContext->GetSpoutCount(); ++ spoutIndex ) { - freeSpouts.insert(spoutIndex); + configuration.GetIntegerProperty("supervisor.port")))), + _host(configuration.GetProperty("supervisor.host")), + _port(configuration.GetIntegerProperty("supervisor.port")) { + _supervisorConfiguration.reset(new 
hurricane::util::Configuration(configuration)); + _name = configuration.GetProperty("supervisor.name"); + + InitNimbusConnector(); + InitSelfContext(); + ReserveExecutors(); + InitEvents(); + } + + void Supervisor::InitNimbusConnector() + { + hurricane::base::NetAddress nimbusAddress(_supervisorConfiguration->GetProperty("nimbus.host"), + _supervisorConfiguration->GetIntegerProperty("nimbus.port")); + _nimbusConnector = new hurricane::util::NetConnector(nimbusAddress); + _nimbusClient = new hurricane::message::CommandClient(_nimbusConnector); + } + + void Supervisor::ReserveExecutors() + { + _spoutExecutors.resize(_supervisorConfiguration->GetIntegerProperty("supervisor.spout.num")); + _boltExecutors.resize(_supervisorConfiguration->GetIntegerProperty("supervisor.bolt.num")); + _spoutCollectors.resize(_supervisorConfiguration->GetIntegerProperty("supervisor.spout.num")); + _boltCollectors.resize(_supervisorConfiguration->GetIntegerProperty("supervisor.bolt.num")); + _boltTaskQueues.resize(_supervisorConfiguration->GetIntegerProperty("supervisor.bolt.num")); + + for ( auto& boltTask : _boltTaskQueues ) { + boltTask.reset(new collector::TaskQueue); + } + + _outputDispatcher.SetQueue(std::shared_ptr( + new collector::OutputQueue())); + _outputDispatcher.SetSelfAddress(hurricane::base::NetAddress(_host, _port)); + _outputDispatcher.SetSelfTasks(_boltTaskQueues); + _outputDispatcher.SetSelfSpoutCount(_spoutExecutors.size()); + + hurricane::base::NetAddress nimbusAddress(_supervisorConfiguration->GetProperty("nimbus.host"), + _supervisorConfiguration->GetIntegerProperty("nimbus.port")); + _nimbusConnector = new hurricane::util::NetConnector(nimbusAddress); + _nimbusClient = new hurricane::message::CommandClient(_nimbusConnector); + _outputDispatcher.SetNimbusClient(_nimbusClient); + + _outputDispatcher.Start(); + } + + void Supervisor::InitEvents() + { + OnConnection(std::bind(&Supervisor::OnConnect, this, std::placeholders::_1)); + OnCommand(hurricane::message::Command::Type::Heartbeat, this, &Supervisor::OnHeartbeat); + OnCommand(hurricane::message::Command::Type::SyncMetadata, this, &Supervisor::OnSyncMetadata); + OnCommand(hurricane::message::Command::Type::SendTuple, this, &Supervisor::OnSendTuple); + } + + void Supervisor::InitTaskFieldsMap() + { + const std::map& spoutDeclarers = + _topology->GetSpoutDeclarers(); + for ( const auto& spoutDeclarerPair : spoutDeclarers ) { + const spout::SpoutDeclarer& spoutDeclarer = spoutDeclarerPair.second; + + _taskFields[spoutDeclarer.GetTaskName()] = &spoutDeclarer.GetFields(); + _taskFieldsMap[spoutDeclarer.GetTaskName()] = &spoutDeclarer.GetFieldsMap(); + } + + const std::map& boltDeclarers = + _topology->GetBoltDeclarers(); + for ( const auto& boltDeclarerPair : boltDeclarers ) { + const bolt::BoltDeclarer& boltDeclarer = boltDeclarerPair.second; + + _taskFields[boltDeclarer.GetTaskName()] = &boltDeclarer.GetFields(); + _taskFieldsMap[boltDeclarer.GetTaskName()] = &boltDeclarer.GetFieldsMap(); + } + + _outputDispatcher.SetTaskFields(_taskFields); + _outputDispatcher.SetTaskFieldsMap(_taskFieldsMap); + } + + void Supervisor::OnConnect(SupervisorContext* context) { + } + + void Supervisor::JoinNimbus(JoinNimbusCallback callback) { + hurricane::message::CommandClient* commandClient = _nimbusClient; + + _nimbusConnector->Connect([commandClient, callback, this]() { + hurricane::message::Command command(hurricane::message::Command::Type::Join); + command.AddArgument({ "supervisor" }); + command.AddArgument({ this->_host }); + command.AddArgument({ 
this->_port }); + std::vector context; + _selfContext->Serialize(context); + command.AddArguments(context); + + commandClient->SendCommand(command, [callback](const hurricane::message::Response& response) { + callback(response); + }); + }); + } + + void Supervisor::OnHeartbeat(SupervisorContext* context, const message::Command& command, + hurricane::message::CommandServer::Responser responser) + { + hurricane::message::Response response(hurricane::message::Response::Status::Successful); + response.AddArgument({ _name }); + + responser(response); + } + + void Supervisor::OnSyncMetadata(SupervisorContext* context, const message::Command& command, + message::CommandServer::Responser responser) + { + const std::vector& arguments = command.GetArguments(); + + int syncMethod = arguments[0].GetInt32Value(); + if ( syncMethod != 1 ) { + hurricane::message::Response response(hurricane::message::Response::Status::Failed); + responser(response); + + return; + } + + hurricane::message::Response response(hurricane::message::Response::Status::Successful); + base::Variants::const_iterator currentIterator = arguments.cbegin() + 1; + _selfContext->Deserialize(currentIterator); + + OwnSupervisorTasks(); + _outputDispatcher.SetTaskInfos(_selfContext->GetTaskInfos()); + + ShowSupervisorMetadata(); + ShowTaskInfos(); + + std::string topologyName = _supervisorConfiguration->GetProperty("topology.name"); + _topology = hurricane::topology::TopologyLoader::GetInstance().GetTopology(topologyName); + + InitTaskFieldsMap(); + InitExecutors(); + + responser(response); + } + + void Supervisor::OnSendTuple(SupervisorContext* context, const message::Command& command, + message::CommandServer::Responser responser) + { + const base::Variants& arguments = command.GetArguments(); + base::Variants::const_iterator it = arguments.cbegin(); + + base::NetAddress sourceAddress; + sourceAddress.Deserialize(it); + + task::ExecutorPosition destination; + destination.Deserialize(it); + + base::Tuple tuple; + tuple.Deserialize(it); + tuple.SetFields(_taskFields[tuple.GetSourceTask()]); + tuple.SetFieldsMap(_taskFieldsMap[tuple.GetSourceTask()]); + + int executorIndex = destination.GetExecutorIndex(); + int boltIndex = executorIndex - _selfContext->GetSpoutCount(); + + std::shared_ptr taskQueue = _boltTaskQueues[boltIndex]; + collector::TaskItem* taskItem = + new collector::TaskItem(executorIndex, tuple); + taskQueue->Push(taskItem); + + hurricane::message::Response response(hurricane::message::Response::Status::Successful); + responser(response); + } + + void Supervisor::InitSelfContext() { + this->_selfContext.reset(new SupervisorContext); + _selfContext->SetId(_name); + _selfContext->SetSpoutCount(_supervisorConfiguration->GetIntegerProperty("supervisor.spout.num")); + _selfContext->SetBoltCount(_supervisorConfiguration->GetIntegerProperty("supervisor.bolt.num")); + _selfContext->SetTaskInfos(std::vector(_selfContext->GetSpoutCount() + _selfContext->GetBoltCount())); + + std::set freeSpouts; + for ( int spoutIndex = 0; spoutIndex != _selfContext->GetSpoutCount(); ++ spoutIndex ) { + freeSpouts.insert(spoutIndex); + } + _selfContext->SetFreeSpouts(freeSpouts); + + std::set freeBolts; + for ( int boltIndex = 0; boltIndex != _selfContext->GetBoltCount(); ++ boltIndex ) { + freeBolts.insert(boltIndex); + } + _selfContext->SetFreeBolts(freeBolts); + } + + void Supervisor::InitSpoutExecutors() + { + std::cout << "Init spout executors" << std::endl; + const std::map& spoutDeclarers = + _topology->GetSpoutDeclarers(); + std::set busySpouts = 
_selfContext->GetBusySpouts(); + for ( int spoutIndex : busySpouts ) { + hurricane::task::TaskInfo& spoutTask = _selfContext->GetSpoutTaskInfo(spoutIndex); + std::string taskName = spoutTask.GetTaskName(); + const hurricane::spout::SpoutDeclarer& spoutDeclarer = spoutDeclarers.at(taskName); + + std::shared_ptr outputQueue = _outputDispatcher.GetQueue(); + collector::OutputCollector* collector = new collector::OutputCollector(spoutIndex, + taskName, outputQueue); + _spoutCollectors[spoutIndex].reset(collector); + + spout::ISpout* spout = spoutDeclarer.GetSpout()->Clone(); + spout->Prepare(_spoutCollectors[spoutIndex]); + + std::shared_ptr spoutExecutor(new task::SpoutExecutor); + spoutExecutor->SetSpout(spout); + int flowParam = _supervisorConfiguration->GetIntegerProperty("spout.flow.param"); + spoutExecutor->SetFlowParam(flowParam); + _spoutExecutors[spoutIndex] = spoutExecutor; + } + } + + void Supervisor::InitBoltExecutors() + { + std::cout << "Init bolt executors" << std::endl; + const std::map& boltDeclarers = + _topology->GetBoltDeclarers(); + std::set busyBolts = _selfContext->GetBusyBolts(); + int spoutCount = _selfContext->GetSpoutCount(); + for ( int boltIndex : busyBolts ) { + std::cout << boltIndex << std::endl; + + hurricane::task::TaskInfo& boltTask = _selfContext->GetBoltTaskInfo(boltIndex); + std::string taskName = boltTask.GetTaskName(); + const hurricane::bolt::BoltDeclarer& boltDeclarer = boltDeclarers.at(taskName); + + std::shared_ptr outputQueue = _outputDispatcher.GetQueue(); + collector::OutputCollector* collector = new collector::OutputCollector( + spoutCount + boltIndex, taskName, outputQueue); + _boltCollectors[boltIndex].reset(collector); + + bolt::IBolt* bolt = boltDeclarer.GetBolt()->Clone(); + bolt->Prepare(_boltCollectors[boltIndex]); + + std::shared_ptr boltExecutor(new task::BoltExecutor); + _boltExecutors[boltIndex] = boltExecutor; + boltExecutor->SetTaskQueue(_boltTaskQueues[boltIndex]); + boltExecutor->SetBolt(bolt); + } + } + + void Supervisor::InitExecutors() + { + InitSpoutExecutors(); + InitBoltExecutors(); + + std::set busyBolts = _selfContext->GetBusyBolts(); + std::set busySpouts = _selfContext->GetBusySpouts(); + + for ( int boltIndex : busyBolts ) { + _boltExecutors[boltIndex]->Start(); + } + + for ( int spoutIndex : busySpouts ) { + _spoutExecutors[spoutIndex]->Start(); + } + } + + void Supervisor::OwnSupervisorTasks() + { + std::vector& taskInfos = _selfContext->GetTaskInfos(); + for ( hurricane::task::TaskInfo& taskInfo : taskInfos ) { + taskInfo.SetSupervisorContext(_selfContext.get()); + } + } + + void Supervisor::ShowSupervisorMetadata() + { + std::cout << "Supervisor name: " << _selfContext->GetId() << std::endl; + std::cout << " Spout count: " << _selfContext->GetSpoutCount() << std::endl; + std::cout << " Bolt count: " << _selfContext->GetBoltCount() << std::endl; + std::cout << " Task info count: " << _selfContext->GetTaskInfos().size() << std::endl; + std::cout << " Free spout count: " << _selfContext->GetFreeSpouts().size() << std::endl; + std::cout << " Free bolt count: " << _selfContext->GetFreeBolts().size() << std::endl; + std::cout << " Busy spout count: " << _selfContext->GetBusySpouts().size() << std::endl; + std::cout << " Busy bolt count: " << _selfContext->GetBusyBolts().size() << std::endl; + } + + void Supervisor::ShowTaskInfos() + { + const std::vector& taskInfos = _selfContext->GetTaskInfos(); + for ( const hurricane::task::TaskInfo& taskInfo : taskInfos ) { + if ( !taskInfo.GetSupervisorContext() ) { + continue; } - 
_selfContext->SetFreeSpouts(freeSpouts); - std::set freeBolts; - for ( int boltIndex = 0; boltIndex != _selfContext->GetBoltCount(); ++ boltIndex ) { - freeBolts.insert(boltIndex); + std::cout << " Supervisor: " << taskInfo.GetSupervisorContext()->GetId() << std::endl; + std::cout << " Exectuor index: " << taskInfo.GetExecutorIndex() << std::endl; + std::cout << " Task name: " << taskInfo.GetTaskName() << std::endl; + std::cout << " Paths: " << std::endl; + const std::list& paths = taskInfo.GetPaths(); + + for ( const hurricane::task::PathInfo& path : paths ) { + std::cout << " Path: " << std::endl; + int groupMethod = path.GetGroupMethod(); + std::cout << " Group method: " << groupMethod << std::endl; + + if ( path.GetGroupMethod() == hurricane::task::PathInfo::GroupMethod::Global) { + std::cout << " Destination host: " << + path.GetDestinationExecutors()[0].GetSupervisor().GetHost() << std::endl; + std::cout << " Destination port: " << + path.GetDestinationExecutors()[0].GetSupervisor().GetPort() << std::endl; + std::cout << " Destination executor index: " << + path.GetDestinationExecutors()[0].GetExecutorIndex() << std::endl; + } } - _selfContext->SetFreeBolts(freeBolts); } - } -} \ No newline at end of file + } +} +} diff --git a/src/hurricane/service/SupervisorContext.cpp b/src/hurricane/service/SupervisorContext.cpp index 6dacfd3..308ae28 100755 --- a/src/hurricane/service/SupervisorContext.cpp +++ b/src/hurricane/service/SupervisorContext.cpp @@ -1,103 +1,39 @@ #include "hurricane/service/SupervisorContext.h" namespace hurricane { - namespace service { - SupervisorContext::SupervisorContext() : _spoutCount(0), _boltCount(0) { - } - - std::vector SupervisorContext::ToVariants() { - std::vector variants; - - variants.push_back({ _id }); - variants.push_back({ _spoutCount }); - variants.push_back({ _boltCount }); - - hurricane::base::Variants freeSpoutsVariants = hurricane::base::Variant::FromStdSet(_freeSpouts); - variants.insert(variants.end(), freeSpoutsVariants.begin(), freeSpoutsVariants.end()); - - hurricane::base::Variants freeBoltsVariants = hurricane::base::Variant::FromStdSet(_freeBolts); - variants.insert(variants.end(), freeBoltsVariants.begin(), freeBoltsVariants.end()); - - hurricane::base::Variants busySpoutsVariants = hurricane::base::Variant::FromStdSet(_busySpouts); - variants.insert(variants.end(), busySpoutsVariants.begin(), busySpoutsVariants.end()); - - hurricane::base::Variants busyBoltsVariants = hurricane::base::Variant::FromStdSet(_busyBolts); - variants.insert(variants.end(), busyBoltsVariants.begin(), busyBoltsVariants.end()); - - return variants; - } - - void SupervisorContext::ParseVariants(const std::vector& variants) { - _id = variants[0].GetStringValue(); - _spoutCount = variants[1].GetIntValue(); - _boltCount = variants[2].GetIntValue(); - - int32_t currentIndex = 3; - - int32_t freeSpoutsSize = variants[currentIndex].GetIntValue(); - currentIndex ++; - _freeSpouts = hurricane::base::Variant::ToStdSet(variants.cbegin() + currentIndex, variants.cbegin() + currentIndex + freeSpoutsSize); - currentIndex += freeSpoutsSize; - - int32_t freeBoltsSize = variants[currentIndex].GetIntValue(); - currentIndex ++; - _freeBolts = hurricane::base::Variant::ToStdSet(variants.cbegin() + currentIndex, variants.cbegin() + currentIndex + freeBoltsSize); - currentIndex += freeBoltsSize; - - int32_t busySpoutsSize = variants[currentIndex].GetIntValue(); - currentIndex ++; - _busySpouts = hurricane::base::Variant::ToStdSet(variants.cbegin() + currentIndex, variants.cbegin() 
+ currentIndex + busySpoutsSize); - currentIndex += busySpoutsSize; - - int32_t busyBoltsSize = variants[currentIndex].GetIntValue(); - currentIndex ++; - _busyBolts = hurricane::base::Variant::ToStdSet(variants.cbegin() + currentIndex, variants.cbegin() + currentIndex + busyBoltsSize); - currentIndex += busyBoltsSize; - } - - void SupervisorContext::ParseVariants(std::vector::const_iterator begin) { - auto currentIterator = begin; - - _id = currentIterator->GetStringValue(); - currentIterator ++; - _spoutCount = currentIterator->GetIntValue(); - currentIterator ++; - _boltCount = currentIterator->GetIntValue(); - currentIterator ++; - - int32_t freeSpoutsSize = currentIterator->GetIntValue(); - currentIterator ++; - _freeSpouts = hurricane::base::Variant::ToStdSet(currentIterator, currentIterator + freeSpoutsSize); - currentIterator += freeSpoutsSize; - - int32_t freeBoltsSize = currentIterator->GetIntValue(); - currentIterator ++; - _freeBolts = hurricane::base::Variant::ToStdSet(currentIterator, currentIterator + freeBoltsSize); - currentIterator += freeBoltsSize; - - int32_t busySpoutsSize = currentIterator->GetIntValue(); - currentIterator ++; - _busySpouts = hurricane::base::Variant::ToStdSet(currentIterator, currentIterator + busySpoutsSize); - currentIterator += busySpoutsSize; - - int32_t busyBoltsSize = currentIterator->GetIntValue(); - currentIterator ++; - _busyBolts = hurricane::base::Variant::ToStdSet(currentIterator, currentIterator + busyBoltsSize); - currentIterator += busyBoltsSize; - } - - SupervisorContext SupervisorContext::FromVariants(std::vector::const_iterator begin) { - SupervisorContext context; - context.ParseVariants(begin); - - return context; - } - - SupervisorContext SupervisorContext::FromVariants(const std::vector& variants) { - SupervisorContext context; - context.ParseVariants(variants); - - return context; - } - } -} \ No newline at end of file +namespace service { + +using hurricane::base::Variant; +using hurricane::base::Variants; +using hurricane::base::Serializable; + +SupervisorContext::SupervisorContext() : _spoutCount(0), _boltCount(0) { +} + +void SupervisorContext::Serialize(base::Variants& variants) const +{ + Variant::Serialize(variants, _id); + Variant::Serialize(variants, _spoutCount); + Variant::Serialize(variants, _boltCount); + Variant::Serialize(variants, _freeSpouts); + Variant::Serialize(variants, _freeBolts); + Variant::Serialize(variants, _busySpouts); + Variant::Serialize(variants, _busyBolts); + Variant::Serialize(variants, _taskInfos); + +} + +void SupervisorContext::Deserialize(Variants::const_iterator& it) +{ + Variant::Deserialize(it, _id); + Variant::Deserialize(it, _spoutCount); + Variant::Deserialize(it, _boltCount); + Variant::Deserialize(it, _freeSpouts); + Variant::Deserialize(it, _freeBolts); + Variant::Deserialize(it, _busySpouts); + Variant::Deserialize(it, _busyBolts); + Variant::Deserialize(it, _taskInfos); +} + +} +} diff --git a/src/hurricane/spout/SpoutDeclarer.cpp b/src/hurricane/spout/SpoutDeclarer.cpp index 45c0ba2..d5bf293 100755 --- a/src/hurricane/spout/SpoutDeclarer.cpp +++ b/src/hurricane/spout/SpoutDeclarer.cpp @@ -7,6 +7,12 @@ namespace hurricane { _spout(spout){ SetType(hurricane::task::TaskDeclarer::Type::Spout); SetTaskName(spoutName); + + _fields = _spout->DeclareFields(); + int fieldIndex = 0; + for ( const std::string& field : _fields ) { + _fieldsMap.insert({field, fieldIndex}); + } } } -} \ No newline at end of file +} diff --git a/src/hurricane/task/BoltExecutor.cpp 
b/src/hurricane/task/BoltExecutor.cpp new file mode 100644 index 0000000..59665c6 --- /dev/null +++ b/src/hurricane/task/BoltExecutor.cpp @@ -0,0 +1,36 @@ +#include "hurricane/task/BoltExecutor.h" +#include "hurricane/bolt/IBolt.h" +#include "hurricane/collector/TaskQueue.h" + +namespace hurricane { +namespace task { + +BoltExecutor::BoltExecutor() +{ + _loop.MessageMap(Executor::MessageType::OnTuple, this, &BoltExecutor::OnTuple); +} + +void BoltExecutor::Start() +{ + _thread = std::thread(&BoltExecutor::StartLoop, this); +} + +void BoltExecutor::OnTuple(message::Message& message) +{ +} + +void BoltExecutor::StartLoop() +{ + collector::TaskItem* taskItem; + + while ( _taskQueue->Pop(taskItem) ) { + _bolt->Execute(taskItem->GetTuple()); + + delete taskItem; + taskItem = nullptr; + } +} + + +} +} diff --git a/src/hurricane/task/Executor.cpp b/src/hurricane/task/Executor.cpp new file mode 100644 index 0000000..95e488c --- /dev/null +++ b/src/hurricane/task/Executor.cpp @@ -0,0 +1 @@ +#include "hurricane/task/Executor.h" diff --git a/src/hurricane/task/PathInfo.cpp b/src/hurricane/task/PathInfo.cpp new file mode 100644 index 0000000..4f6f62b --- /dev/null +++ b/src/hurricane/task/PathInfo.cpp @@ -0,0 +1,40 @@ +#include "hurricane/task/TaskInfo.h" +#include "hurricane/base/Variant.h" + +namespace hurricane { +namespace task { + +using hurricane::base::Variant; +using hurricane::base::Variants; +using hurricane::base::Serializable; + +void PathInfo::Serialize(base::Variants& variants) const +{ + Variant::Serialize(variants, _groupMethod); + Variant::Serialize(variants, _destinationTask); + Variant::Serialize(variants, _fieldName); + Variant::Serialize(variants, _destinationExecutors); +} + +void PathInfo::Deserialize(Variants::const_iterator& it) +{ + Variant::Deserialize(it, _groupMethod); + Variant::Deserialize(it, _destinationTask); + Variant::Deserialize(it, _fieldName); + Variant::Deserialize(it, _destinationExecutors); +} + +void ExecutorPosition::Serialize(base::Variants& variants) const +{ + Variant::Serialize(variants, _supervisor); + Variant::Serialize(variants, _executorIndex); +} + +void ExecutorPosition::Deserialize(Variants::const_iterator& it) +{ + Variant::Deserialize(it, _supervisor); + Variant::Deserialize(it, _executorIndex); +} + +} +} diff --git a/src/hurricane/task/SpoutExecutor.cpp b/src/hurricane/task/SpoutExecutor.cpp new file mode 100644 index 0000000..b5d71d5 --- /dev/null +++ b/src/hurricane/task/SpoutExecutor.cpp @@ -0,0 +1,43 @@ +#include "hurricane/task/SpoutExecutor.h" +#include "hurricane/spout/ISpout.h" +#include + +namespace hurricane { +namespace task { + +SpoutExecutor::SpoutExecutor() +{ + +} + +void SpoutExecutor::Start() +{ + _thread = std::thread(&SpoutExecutor::MainLoop, this); +} + +void SpoutExecutor::SetSpout(spout::ISpout* spout) +{ + _spout.reset(spout); +} + +void SpoutExecutor::MainLoop() +{ + int flowTime = 1000 * 1000 / _flowParam; + while ( true ) { + _spout->NextTuple(); + std::this_thread::sleep_for(std::chrono::microseconds(flowTime)); + } +} + +int SpoutExecutor::GetFlowParam() const +{ + return _flowParam; +} + +void SpoutExecutor::SetFlowParam(int flowParam) +{ + _flowParam = flowParam; +} + +} +} diff --git a/src/hurricane/task/TaskInfo.cpp b/src/hurricane/task/TaskInfo.cpp new file mode 100644 index 0000000..599581a --- /dev/null +++ b/src/hurricane/task/TaskInfo.cpp @@ -0,0 +1,27 @@ +#include "hurricane/task/TaskInfo.h" + +namespace hurricane { +namespace task { + +using hurricane::base::Variant; +using hurricane::base::Variants; 
+using hurricane::base::Serializable; + +void TaskInfo::Serialize(base::Variants& variants) const +{ + Variant::Serialize(variants, _topologyName); + Variant::Serialize(variants, _taskName); + Variant::Serialize(variants, _paths); + Variant::Serialize(variants, _executorIndex); +} + +void TaskInfo::Deserialize(base::Variants::const_iterator& it) +{ + Variant::Deserialize(it, _topologyName); + Variant::Deserialize(it, _taskName); + Variant::Deserialize(it, _paths); + Variant::Deserialize(it, _executorIndex); +} + +} +} diff --git a/src/hurricane/tool/StartNimbus.cpp b/src/hurricane/tool/StartNimbus.cpp index 29d6402..41e67c7 100755 --- a/src/hurricane/tool/StartNimbus.cpp +++ b/src/hurricane/tool/StartNimbus.cpp @@ -20,9 +20,9 @@ int main(int argc, char *argv[]) } void StartNimbus(const std::string& configFileName) { - hurricane::util::Configuration supervisorConfiguration; - supervisorConfiguration.Parse(configFileName); + hurricane::util::Configuration nimbusConfigratuion; + nimbusConfigratuion.Parse(configFileName); - hurricane::service::Nimbus nimbus(hurricane::base::NetAddress("127.0.0.1", 6009)); + hurricane::service::Nimbus nimbus(nimbusConfigratuion); nimbus.StartListen(); -} \ No newline at end of file +} diff --git a/src/hurricane/tool/StartSupervisor.cpp b/src/hurricane/tool/StartSupervisor.cpp index e85f368..e4bc0d5 100755 --- a/src/hurricane/tool/StartSupervisor.cpp +++ b/src/hurricane/tool/StartSupervisor.cpp @@ -28,7 +28,7 @@ void StartSupervisor(const std::string& configFileName) { std::cout << supervisorConfiguration.GetProperty("supervisor.host") << std::endl; std::cout << supervisorConfiguration.GetIntegerProperty("supervisor.port") << std::endl; - hurricane::service::Supervisor supervisor(supervisorConfiguration); + hurricane::service::Supervisor supervisor(supervisorConfiguration); supervisor.JoinNimbus([&supervisor](const hurricane::message::Response& response) { if ( response.GetStatus() != hurricane::message::Response::Status::Successful ) { std::cerr << "Can't join nimbus." 
<< std::endl; @@ -36,7 +36,10 @@ void StartSupervisor(const std::string& configFileName) { exit(EXIT_FAILURE); } + else { + std::cout << "Join successfully" << std::endl; + } - supervisor.StartListen(); + supervisor.StartListen(); }); } diff --git a/src/hurricane/topology/TopologyLoader.cpp b/src/hurricane/topology/TopologyLoader.cpp index 22202b9..aa9525e 100755 --- a/src/hurricane/topology/TopologyLoader.cpp +++ b/src/hurricane/topology/TopologyLoader.cpp @@ -1,4 +1,5 @@ #include "hurricane/topology/TopologyLoader.h" +#include "hurricane/base/Library.h" #include "sample/wordcount/WordCountTopology.h" namespace hurricane { @@ -9,14 +10,21 @@ namespace hurricane { return instance; } - TopologyLoader::TopologyLoader() { - Topology* topology = ::GetTopology(); - - _topologies.insert({ topology->GetName(), std::shared_ptr(topology) }); + TopologyLoader::TopologyLoader() { } std::shared_ptr TopologyLoader::GetTopology(const std::string& topologyName) { + if ( _libraryHandles.find(topologyName) == _libraryHandles.end() ) { + LibraryHandle libraryHandle = HurricaneLibraryLoad(topologyName); + _libraryHandles[topologyName] = libraryHandle; + TopologyGetter topologyGetter = + HurricaneLibraryGetSymbol(libraryHandle, "GetTopology"); + + std::cout << "Getter: " << topologyGetter << std::endl; + _topologies[topologyName].reset(topologyGetter()); + } + return _topologies[topologyName]; } } -} \ No newline at end of file +} diff --git a/src/hurricane/util/NetConnector.cpp b/src/hurricane/util/NetConnector.cpp index add099e..a35ee71 100755 --- a/src/hurricane/util/NetConnector.cpp +++ b/src/hurricane/util/NetConnector.cpp @@ -17,6 +17,7 @@ */ #include "hurricane/util/NetConnector.h" +#include namespace hurricane { namespace util { @@ -24,9 +25,18 @@ namespace hurricane { void NetConnector::Connect() { - _client = std::make_shared(); - - _client->Connect(_host.GetHost(), _host.GetPort()); + if ( !_client.get() ) { + _client = std::make_shared(); + + try { + _client->Connect(_host.GetHost(), _host.GetPort()); + } + catch ( const std::exception& e ) { + std::cout << "Release client" << std::endl; + _client.reset(); + throw e; + } + } } void NetConnector::Connect(ConnectCallback callback) { @@ -41,8 +51,8 @@ namespace hurricane { } void NetConnector::SendAndReceive(const char* buffer, int32_t size, DataReceiver receiver) - { - _client->Send(buffer, size); + { + _client->Send(buffer, size); char resultBuffer[RECEIVE_BUFFER_SIZE]; int readSize = _client->Receive(resultBuffer, RECEIVE_BUFFER_SIZE); diff --git a/src/hurricane/util/NetListener.cpp b/src/hurricane/util/NetListener.cpp index 3c8b4aa..4be62d4 100755 --- a/src/hurricane/util/NetListener.cpp +++ b/src/hurricane/util/NetListener.cpp @@ -20,8 +20,8 @@ #include #include #include - -namespace hurricane { + +namespace hurricane { namespace util { const int DATA_BUFFER_SIZE = 65535; @@ -29,8 +29,8 @@ namespace hurricane { { _server = std::make_shared(); - _server->Listen(_host.GetHost(), _host.GetPort()); - std::cout << "Listen on " << _host.GetHost() << ":" << _host.GetPort() << std::endl; + _server->Listen(_host.GetHost(), _host.GetPort()); + std::cout << "Listen on " << _host.GetHost() << ":" << _host.GetPort() << std::endl; while ( 1 ) { diff --git a/src/hurricane/util/StringUtil.cpp b/src/hurricane/util/StringUtil.cpp index 7e399bc..c9e5ef2 100755 --- a/src/hurricane/util/StringUtil.cpp +++ b/src/hurricane/util/StringUtil.cpp @@ -20,6 +20,7 @@ #include #include #include +#include using std::vector; using std::string; @@ -73,3 +74,23 @@ std::string 
RandomString(const std::string & candidate, int length) return result; } + +std::string Int2String(int value) +{ + std::ostringstream os; + os << value; + + return os.str(); +} + +std::string JoinStrings(const std::vector& words) +{ + std::string sentence; + for ( const std::string& word : words ) { + sentence += word + ' '; + } + + sentence.pop_back(); + + return sentence; +} diff --git a/src/sample/wordcount/HelloWorldSpout.cpp b/src/sample/wordcount/HelloWorldSpout.cpp index 9e9a682..9ef3f8f 100755 --- a/src/sample/wordcount/HelloWorldSpout.cpp +++ b/src/sample/wordcount/HelloWorldSpout.cpp @@ -1,7 +1,12 @@ #include "sample/wordcount/HelloWorldSpout.h" +#include "hurricane/util/StringUtil.h" +#include +#include +#include void HelloWorldSpout::Prepare(std::shared_ptr outputCollector) { - _outputCollector = outputCollector; + _outputCollector = outputCollector; + _words = SplitString("Hello world there are some words we generate new sentence randomly", ' '); } void HelloWorldSpout::Cleanup() { @@ -12,7 +17,23 @@ std::vector HelloWorldSpout::DeclareFields() { } void HelloWorldSpout::NextTuple() { + static int32_t id = 0; + timeval currentTime; + gettimeofday(¤tTime, nullptr); + + int64_t currentMicroseconds = currentTime.tv_sec; + currentMicroseconds *= 1000000; + currentMicroseconds += currentTime.tv_usec; + + ++ id; + + std::vector words(5); + for ( int i = 0; i < 5; i ++ ) { + words[i] = _words[rand() % _words.size()]; + } + + std::string sentence = JoinStrings(words); _outputCollector->Emit({ - "Hello World" - }); -} \ No newline at end of file + sentence, currentMicroseconds, id + }); +} diff --git a/src/sample/wordcount/SplitSentenceBolt.cpp b/src/sample/wordcount/SplitSentenceBolt.cpp index aa40255..072bde4 100755 --- a/src/sample/wordcount/SplitSentenceBolt.cpp +++ b/src/sample/wordcount/SplitSentenceBolt.cpp @@ -2,7 +2,7 @@ #include "hurricane/util/StringUtil.h" void SplitSentenceBolt::Prepare(std::shared_ptr outputCollector) { - _outputCollector = outputCollector; + _outputCollector = outputCollector; } void SplitSentenceBolt::Cleanup() { @@ -13,10 +13,13 @@ std::vector SplitSentenceBolt::DeclareFields() { } void SplitSentenceBolt::Execute(const hurricane::base::Tuple& tuple) { - std::string sentence = tuple[0].ToString(); - std::vector words = SplitString(sentence, ' '); + std::string sentence = tuple[0].GetStringValue(); + int64_t sourceMicroseconds = tuple[1].GetInt64Value(); + int32_t id = tuple[2].GetInt32Value(); - for ( const std::string& word : words ) { - _outputCollector->Emit({ word }); - } -} \ No newline at end of file + std::vector words = SplitString(sentence, ' '); + + for ( const std::string& word : words ) { + _outputCollector->Emit({ word, sourceMicroseconds, id }); + } +} diff --git a/src/sample/wordcount/WordCountBolt.cpp b/src/sample/wordcount/WordCountBolt.cpp index b3669d6..a5a1c1b 100755 --- a/src/sample/wordcount/WordCountBolt.cpp +++ b/src/sample/wordcount/WordCountBolt.cpp @@ -1,11 +1,22 @@ #include "sample/wordcount/WordCountBolt.h" #include "hurricane/util/StringUtil.h" +#include +#include + +std::string itos(int number) { + std::ostringstream ss; + ss << number; + + return ss.str(); +} void WordCountBolt::Prepare(std::shared_ptr outputCollector) { _outputCollector = outputCollector; + _logFile = new std::ofstream("timestamp" + itos(rand()) + ".txt"); } void WordCountBolt::Cleanup() { + delete _logFile; } std::vector WordCountBolt::DeclareFields() { @@ -13,7 +24,9 @@ std::vector WordCountBolt::DeclareFields() { } void WordCountBolt::Execute(const 
hurricane::base::Tuple& tuple) { - std::string word = tuple[0].ToString(); + std::string word = tuple[0].GetStringValue(); + int64_t sourceMicroseconds = tuple[1].GetInt64Value(); + int32_t id = tuple[2].GetInt32Value(); auto wordCountIterator = _wordCounts.find(word); if ( wordCountIterator == _wordCounts.end() ) { @@ -23,5 +36,15 @@ void WordCountBolt::Execute(const hurricane::base::Tuple& tuple) { wordCountIterator->second ++; + std::cout << word << ' ' << wordCountIterator->second << std::endl; _outputCollector->Emit({ word, wordCountIterator->second }); -} \ No newline at end of file + + timeval currentTime; + gettimeofday(¤tTime, nullptr); + + int64_t currentMicroseconds = currentTime.tv_sec; + currentMicroseconds *= 1000000; + currentMicroseconds += currentTime.tv_usec; + + *_logFile << sourceMicroseconds << ' ' << currentMicroseconds << std::endl; +} diff --git a/src/sample/wordcount/WordCountTopology.cpp b/src/sample/wordcount/WordCountTopology.cpp index d1814c5..9530b92 100755 --- a/src/sample/wordcount/WordCountTopology.cpp +++ b/src/sample/wordcount/WordCountTopology.cpp @@ -12,12 +12,12 @@ hurricane::topology::Topology* GetTopology() { .ParallismHint(1); topology->SetBolt("split-sentence-bolt", new SplitSentenceBolt) - .Global("hello-world-spout") + .Random("hello-world-spout") .ParallismHint(3); - topology->SetBolt("word-count-bolt", new WordCountBolt) - .Group("split-sentence-bolt", "word") - .ParallismHint(2); + topology->SetBolt("word-count-bolt", new WordCountBolt) + .Field("split-sentence-bolt", "word") + .ParallismHint(2); return topology; -} \ No newline at end of file +} diff --git a/target/run/avg.js b/target/run/avg.js new file mode 100644 index 0000000..6f7c78d --- /dev/null +++ b/target/run/avg.js @@ -0,0 +1,30 @@ +'use strict'; + +const fs = require('fs'); + +const timestampTexts = fs.readFileSync('timestamp.txt', 'utf8'); +const timestamps = timestampTexts.split('\n').map(line => { + line = line.trim(); + const parts = line.split(' '); + + return { + start: Number(parts[0]), + end: Number(parts[1]) + }; +}); + +let totalResponse = 0; +let timestampCount = 0; +timestamps.forEach(timestamp => { + if ( timestamp.start && timestamp.end ) { + totalResponse += (timestamp.end - timestamp.start); + timestampCount ++; + } +}); + +console.log(totalResponse); + +const avg = totalResponse / timestampCount; +console.log(avg); + +// 133 diff --git a/target/run/deploy.sh b/target/run/deploy.sh new file mode 100755 index 0000000..d39f3c9 --- /dev/null +++ b/target/run/deploy.sh @@ -0,0 +1,3 @@ +cp -u ../bin/linux/x64/Release/nimbus . +cp -u ../bin/linux/x64/Release/supervisor . +cp -u ../lib/linux/x64/Release/libwordcount.so . diff --git a/target/run/nimbus b/target/run/nimbus new file mode 100755 index 0000000..9c5ad91 Binary files /dev/null and b/target/run/nimbus differ diff --git a/target/run/nimbus.properties b/target/run/nimbus.properties index 1153ab5..e37eafc 100644 --- a/target/run/nimbus.properties +++ b/target/run/nimbus.properties @@ -1,2 +1,5 @@ -nimbus.host = 127.0.0.1 -nimbus.port = 6009 +nimbus.host = 127.0.0.1 +nimbus.port = 6009 +nimbus.supervisor.count = 3 + +topology.name = wordcount diff --git a/target/run/run_nimbus.sh b/target/run/run_nimbus.sh new file mode 100755 index 0000000..1cf30d2 --- /dev/null +++ b/target/run/run_nimbus.sh @@ -0,0 +1,5 @@ +./deploy.sh + +export LD_LIBRARY_PATH=. 
+
+./nimbus nimbus.properties
diff --git a/target/run/run_supervisor.sh b/target/run/run_supervisor.sh
new file mode 100755
index 0000000..dd08dab
--- /dev/null
+++ b/target/run/run_supervisor.sh
@@ -0,0 +1,6 @@
+./deploy.sh
+
+export LD_LIBRARY_PATH=.
+
+config_file="supervisor$1".properties
+./supervisor "${config_file}"
diff --git a/target/run/supervisor b/target/run/supervisor
new file mode 100755
index 0000000..2eb22cc
Binary files /dev/null and b/target/run/supervisor differ
diff --git a/target/run/supervisor1.properties b/target/run/supervisor1.properties
index 5f59d53..515980b 100644
--- a/target/run/supervisor1.properties
+++ b/target/run/supervisor1.properties
@@ -1,9 +1,13 @@
-nimbus.host = 127.0.0.1
-nimbus.port = 6009
-
-supervisor.host = 127.0.0.1
-supervisor.port = 7001
-
-supervisor.name = s1
-supervisor.spout.num = 2
-supervisor.bolt.num = 3
+nimbus.host = 127.0.0.1
+nimbus.port = 6009
+
+supervisor.host = 127.0.0.1
+supervisor.port = 7001
+
+supervisor.name = s1
+supervisor.spout.num = 2
+supervisor.bolt.num = 3
+
+topology.name = wordcount
+
+spout.flow.param = 5000
diff --git a/target/run/supervisor2.properties b/target/run/supervisor2.properties
index 58ea45d..21e2b7b 100644
--- a/target/run/supervisor2.properties
+++ b/target/run/supervisor2.properties
@@ -1,9 +1,13 @@
-nimbus.host = 127.0.0.1
-nimbus.port = 6009
-
-supervisor.host = 127.0.0.1
-supervisor.port = 7002
-
-supervisor.name = s2
-supervisor.spout.num = 2
-supervisor.bolt.num = 3
+nimbus.host = 127.0.0.1
+nimbus.port = 6009
+
+supervisor.host = 127.0.0.1
+supervisor.port = 7002
+
+supervisor.name = s2
+supervisor.spout.num = 2
+supervisor.bolt.num = 3
+
+topology.name = wordcount
+
+spout.flow.param = 5000
diff --git a/target/run/supervisor3.properties b/target/run/supervisor3.properties
index 50d4131..9809f2d 100644
--- a/target/run/supervisor3.properties
+++ b/target/run/supervisor3.properties
@@ -1,9 +1,13 @@
-nimbus.host = 127.0.0.1
-nimbus.port = 6009
-
-supervisor.host = 127.0.0.1
-supervisor.port = 7003
-
-supervisor.name = s3
-supervisor.spout.num = 2
-supervisor.bolt.num = 3
+nimbus.host = 127.0.0.1
+nimbus.port = 6009
+
+supervisor.host = 127.0.0.1
+supervisor.port = 7003
+
+supervisor.name = s3
+supervisor.spout.num = 2
+supervisor.bolt.num = 3
+
+topology.name = wordcount
+
+spout.flow.param = 5000
diff --git a/tools/build.sh b/tools/build.sh
new file mode 100755
index 0000000..9c95843
--- /dev/null
+++ b/tools/build.sh
@@ -0,0 +1,47 @@
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+KAKEFILE_DIR="${DIR}/../kake/Hurricane"
+BUILD_DIR="${DIR}/../target/build/linux/x64/Release"
+
+source "${DIR}/deps.sh"
+
+echo "[INFO] Check NodeJS"
+NODE_PATH=$("${DIR}/check_tool.sh" NodeJS node ../deps/node)
+NODE_DIR=$(dirname ${NODE_PATH})
+
+echo "[INFO] Check Kake"
+KAKE_PATH=$("${DIR}/check_tool.sh" Kake kake ../deps/kake)
+KAKE_DIR=$(dirname ${KAKE_PATH})
+
+echo "${NODE_DIR}"
+echo "${KAKE_DIR}"
+
+if [ -z "${KAKE_DIR}" ]; then
+    echo "You have not installed kake, do you want to install it?(Y/N)"
+    read to_install_kake
+
+    if [ "${to_install_kake}" == "N" ]; then
+        make install
+        exit 0
+    fi
+fi
+
+if [ -z "${NODE_DIR}" ]; then
+    echo "[INFO] Download NodeJS"
+    "${DIR}/download_tool.sh" node
+fi
+
+if [ -z "${KAKE_DIR}" ]; then
+    echo "[INFO] Download Kake"
+    "${DIR}/download_tool.sh" kake "${KAKE_URL}"
+fi
+
+pushd .
+
+cd "${KAKEFILE_DIR}"
+npm install
+"${KAKE_DIR}/kake" generate
+
+cd "${BUILD_DIR}"
+make
+
+popd
diff --git a/tools/check_tool.sh b/tools/check_tool.sh
new file mode 100755
index 0000000..b7cb387
--- /dev/null
+++ b/tools/check_tool.sh
@@ -0,0 +1,15 @@
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+TOOL_NAME=$1
+TOOL_EXEC=$2
+TOOL_DIR="${DIR}/../deps"
+
+TOOL_PATH=$(which ${TOOL_EXEC})
+
+if [ -z "${TOOL_PATH}" ]; then
+    if [ -f "${TOOL_DIR}/${TOOL_NAME}" ]; then
+        echo "${TOOL_DIR}/${TOOL_NAME}"
+    fi
+fi
+
+echo ${TOOL_PATH}
diff --git a/tools/config.sh b/tools/config.sh
new file mode 100644
index 0000000..7614a03
--- /dev/null
+++ b/tools/config.sh
@@ -0,0 +1,2 @@
+export MANAGER_USER=manager
+export HURRICANE_HOME=/opt/hurricane
diff --git a/tools/deps.sh b/tools/deps.sh
new file mode 100644
index 0000000..310ca18
--- /dev/null
+++ b/tools/deps.sh
@@ -0,0 +1,4 @@
+NODE_EXEC="node"
+NODE_URL=""
+KAKE_EXEC="kake"
+KAKE_URL="http://127.0.0.1:8080/kake.tar.gz"
diff --git a/tools/download_tool.sh b/tools/download_tool.sh
new file mode 100755
index 0000000..1620259
--- /dev/null
+++ b/tools/download_tool.sh
@@ -0,0 +1,10 @@
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+TOOL_NAME=$1
+TOOL_URL=$2
+
+wget "${TOOL_URL}" -o "${TOOL_NAME}"
+
+if [ -f "${DIR}/../deps/${TOOL_NAME}" ]; then
+    echo "${DIR}/../deps/${TOOL_NAME}"
+fi
diff --git a/tools/install_managers.sh b/tools/install_managers.sh
new file mode 100755
index 0000000..ffeb5ea
--- /dev/null
+++ b/tools/install_managers.sh
@@ -0,0 +1,8 @@
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+source "${DIR}/config.sh"
+
+while read node; do
+    scp "${DIR}../" -r "${MANAGER_USER}"@"$node":"${HURRICANE_HOME}"
+    ssh "$node" 'cd ${HURRICANE_HOME};tools/build.sh'
+done < nodes.list
diff --git a/tools/nodes.list b/tools/nodes.list
new file mode 100644
index 0000000..8893669
--- /dev/null
+++ b/tools/nodes.list
@@ -0,0 +1,3 @@
+192.168.101.100
+192.168.101.101
+192.168.101.102