Skip to content
This repository

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
Browse code

first implementation

  • Loading branch information...
commit 9590f04a2ccdcf0f2573529f53829625dd497e37 1 parent 6431576
hfan authored
28 tagcloud_package/LICENSE.txt
... ... @@ -0,0 +1,28 @@
  1 +
  2 +Portions of this software Copyright (c) 2011 by Vertica, an HP
  3 +Company. All rights reserved.
  4 +
  5 +Redistribution and use in source and binary forms, with or without
  6 +modification, are permitted provided that the following conditions are
  7 +met:
  8 +
  9 +- Redistributions of source code must retain the above copyright
  10 + notice, this list of conditions and the following disclaimer.
  11 +
  12 +- Redistributions in binary form must reproduce the above copyright
  13 + notice, this list of conditions and the following disclaimer in the
  14 + documentation and/or other materials provided with the distribution.
  15 +
  16 +
  17 +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
  18 +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
  19 +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
  20 +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
  21 +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
  22 +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
  23 +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
  24 +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
  25 +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  26 +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
  27 +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  28 +
90 tagcloud_package/Makefile
... ... @@ -0,0 +1,90 @@
  1 +############################
  2 +# Vertica Analytic Database
  3 +#
  4 +# Makefile to build package directory
  5 +#
  6 +# Copyright 2011 Vertica Systems, an HP Company
  7 +############################
  8 +
  9 +SDK?=/opt/vertica/sdk
  10 +VSQL?=vsql
  11 +
  12 +VERTICA_SDK_INCLUDE = $(SDK)/include
  13 +SIMULATOR_PATH = $(SDK)/simulator
  14 +
  15 +THIRD_PARTY = $(shell pwd)/src/third-party
  16 +THIRD_PARTY_INCLUDE = $(THIRD_PARTY)/include
  17 +
  18 +# Add in your source files below
  19 +BUILD_FILES = build/Vertica.o \
  20 + $(addprefix build/,TagCloud.o)
  21 +
  22 +# Define the .so name here (and update the references in ddl/install.sql and ddl/uninstall.sql)
  23 +PACKAGE_LIBNAME = lib/TagCloud.so
  24 +
  25 +CXX=g++
  26 +CXXFLAGS=-g -D HAVE_LONG_LONG_INT_64 -c -I ../include -Wall -Wno-unused-value -fPIC -I $(VERTICA_SDK_INCLUDE) -I $(THIRD_PARTY_INCLUDE)
  27 +LDFLAGS=-shared
  28 +
  29 +# add optimization if not a debug build
  30 +# (make DEBUG=true will make a non-optimized build)
  31 +ifndef DEBUG
  32 +CXXFLAGS+= -O3 -DNDEBUG
  33 +CFLAGS+= -O3 -DNDEBUG
  34 +endif
  35 +
  36 +.PHONY: $(PACKAGE_LIBNAME)
  37 +all: $(PACKAGE_LIBNAME)
  38 +
  39 +# Main target that builds the package library
  40 +$(PACKAGE_LIBNAME): $(BUILD_FILES)
  41 + mkdir -p lib
  42 + $(CXX) $(LDFLAGS) -o $@ $(BUILD_FILES)
  43 +
  44 +# rule to make build/XXX.o from src/XXX.cpp
  45 +build/%.o: src/%.cpp
  46 + @mkdir -p build
  47 + $(CXX) $(CXXFLAGS) $< -o $@
  48 +
  49 +# rule to compile symbols from the vertica SDK:
  50 +build/Vertica.o: $(VERTICA_SDK_INCLUDE)/Vertica.cpp
  51 + @mkdir -p build
  52 + $(CXX) $(CXXFLAGS) $(VERTICA_SDK_INCLUDE)/Vertica.cpp -o $@
  53 +
  54 +# example rule to make build/XX.o from third-party/src/*.c
  55 +#build/%.o: $(THIRD_PARTY)/src/%.c
  56 +# @mkdir -p build
  57 +# $(CXX) $(CXXFLAGS) $< -o $@
  58 +
  59 +
  60 +# Targets to install and uninstall the library and functions
  61 +install: $(PACKAGE_LIBNAME) ddl/install.sql
  62 + $(VSQL) -f ddl/install.sql
  63 +uninstall: ddl/uninstall.sql
  64 + $(VSQL) -f ddl/uninstall.sql
  65 +
  66 +# run examples
  67 +test:
  68 + $(VSQL) -f examples/TagCloud.sql
  69 +
  70 +clean:
  71 + rm -rf build
  72 + rm -rf lib
  73 + rm -f vsim*
  74 + rm -f output.html
  75 +
  76 +##############
  77 +# Advanced usage: use simulator to debug and test
  78 +##############
  79 +
  80 +DEBUG_DATA_PATH = $(shell pwd)/test-data
  81 +
  82 +# Run the simulator against the functions
  83 +sim_test: all simulator
  84 + $(VALGRIND) ./vsim $(PACKAGE_LIBNAME) FunctionName $(DEBUG_DATA_PATH)/testdata.txt
  85 +
  86 +# build the simulator (in SIMULATOR_PATH) and symlink it here
  87 +simulator:
  88 + $(MAKE) -C $(SIMULATOR_PATH)
  89 + ln -f -s $(SIMULATOR_PATH)/vsim
  90 +
103 tagcloud_package/README.txt
... ... @@ -0,0 +1,103 @@
  1 +-------------------------------
  2 +INTRODUCTION
  3 +-------------------------------
  4 +
  5 +This library contains functions to generate Tag Cloud for a given key word
  6 +within a provided text corpus. When using Wikipedia as the text corpus, it
  7 +achieves a similar effect as a previous Hadoop implementation:
  8 +http://www.youtube.com/watch?feature=player_detailpage&v=2Iz5V9MrkBg#t=120s
  9 +
  10 +Two types of functions are implemented to achieve the goal:
  11 +
  12 +The first type is to gather the relevant words and their relevance scores to the
  13 +key word, RelevantWords or RelevantWordsNoLoad can be used for the purpose when
  14 +the text corpus is already loaded into Vertica, or the text corpus is an
  15 +external file respectively.
  16 +
  17 +The second type is to generate a Tag Cloud in HTML, taking as input the words
  18 +and their relevance scores, which is the output of the previous function. The
  19 +function name is GenerateTagCloud.
  20 +
  21 +See examples/example_tag_cloud.html as an example of the result in visual
  22 +effect when 'vertica' is used as the key word and the whole wikipedia is used as
  23 +searched corpus (download available at
  24 +http://en.wikipedia.org/wiki/Wikipedia:Database_download)
  25 +
  26 +
  27 +-------------------------------
  28 +BUILDING
  29 +-------------------------------
  30 +
  31 +To build:
  32 +
  33 +$ make
  34 +
  35 +
  36 +-------------------------------
  37 +INSTALLING / UNINSTALLING
  38 +-------------------------------
  39 +
  40 +Assuming vsql is in your path, just do:
  41 +
  42 +$ make install
  43 +$ make uninstall
  44 +
  45 +Alternatively, you can find the DDL that 'make install' uses in:
  46 + ddl/install.sql
  47 +and
  48 + ddl/uninstall.sql
  49 +
  50 +-------------------------------
  51 +USAGE
  52 +-------------------------------
  53 +
  54 +RelevantWords('key_word', 'text_columns')
  55 +
  56 +Arguments:
  57 +key_word - the search key word
  58 +text_columns - the varchar columns containing text corpus, there is no
  59 + restriction about how the column is organized/ordered, the
  60 + function just treats the input as a stream of incoming words
  61 +
  62 +Output columns:
  63 +weight - the relevance score of the word
  64 +word - the words that the algorithm considers relevant to the key word
  65 +
  66 +
  67 +
  68 +RelevantWordsNoLoad('key_word', 'corpus_file_name')
  69 +
  70 +Arguments:
  71 +key_word - the search key word
  72 +corpus_file_name - the file name of the text corpus, this function is helpful
  73 + when the corpus data is not loaded into Vertica
  74 +
  75 +Output columns:
  76 +Same as RelevantWords()
  77 +
  78 +
  79 +GenerateTagCloud('score', 'word', 'html_file_name')
  80 +
  81 +Arguments:
  82 +score - the relevance score of the word from RelevantWordsNoLoad()
  83 + or RelevantWords()
  84 +word - the relevant word
  85 +html_file_name - the file name for the generated HTML file
  86 +
  87 +Output columns:
  88 +status - the status of HTML file generation
  89 +
  90 +-------------------------------
  91 +PERFORMANCE
  92 +-------------------------------
  93 +
  94 +The function is relatively disk IO heavy. On a laptop, using the whole 33G
  95 +uncompressed wikipedia as the text corpus, it finishes in about 6~7 minutes
  96 +with disk utilization above 90%; as a comparison, simply 'cat'-ing the text corpus
  97 +into /dev/null also takes a little bit over 6 minutes.
  98 +
  99 +-------------------------------
  100 +LICENSE
  101 +-------------------------------
  102 +
  103 +Please see LICENSE.txt
10 tagcloud_package/ddl/install.sql
... ... @@ -0,0 +1,10 @@
  1 +-- Installation script: defines the shared library and the appropriate entry points
  2 +
  3 +select version();
  4 +
  5 +\set libfile '\''`pwd`'/lib/TagCloud.so\'';
  6 +
  7 +CREATE LIBRARY TagCloudLib as :libfile;
  8 +create transform function RelevantWords as language 'C++' name 'RelevantWordsFactory' library TagCloudLib;
  9 +create transform function RelevantWordsNoLoad as language 'C++' name 'RelevantWordsNoLoadFactory' library TagCloudLib;
  10 +create transform function GenerateTagCloud as language 'C++' name 'GenerateTagCloudFactory' library TagCloudLib;
1  tagcloud_package/ddl/uninstall.sql
... ... @@ -0,0 +1 @@
  1 +DROP LIBRARY TagCloudLib CASCADE;
24 tagcloud_package/examples/TagCloud.sql
... ... @@ -0,0 +1,24 @@
  1 +\set textCorpus '\''`pwd`'/examples/text_corpus.txt\''
  2 +\set htmlFile '\''`pwd`'/output.html\''
  3 +
  4 +\echo ***************** Search 'vertica' in the small text_corpus.txt *****************
  5 +select RelevantWordsNoLoad('vertica', :textCorpus) over() order by weight desc limit 20;
  6 +
  7 +
  8 +
  9 +\echo ***************** Load text_corpus.txt into a table first, and then search 'vertica' in it *****************
  10 +create table alltext(line varchar(64000));
  11 +copy alltext(line) from :textCorpus DELIMITER E'\n';
  12 +
  13 +select RelevantWords('vertica', line) over() from alltext order by weight desc limit 20;
  14 +
  15 +drop table alltext cascade;
  16 +
  17 +
  18 +
  19 +\echo ****************************** Generate HTML to show the graphical effect *************************
  20 +\echo ****** This generates output.html in current directory, use your favorite browser to see it ******
  21 +drop table words cascade;
  22 +create table words(weight float, word varchar);
  23 +insert into words select RelevantWordsNoLoad('vertica', :textCorpus) over() order by weight desc limit 50;
  24 +select GenerateTagCloud(weight, word, :htmlFile) over () from words;
1  tagcloud_package/examples/example_tag_cloud.html
... ... @@ -0,0 +1 @@
  1 +<p class="tag_cloud"></p><span style="font-size: 10px; color: green">xconomy</span><span style="font-size: 13px; color: orange">world</span><span style="font-size: 60px; color: red">database</span><span style="font-size: 10px; color: green">zacks</span><span style="font-size: 13px; color: blue">xml</span><span style="font-size: 12px; color: green">computerworld</span><span style="font-size: 15px; color: orange">minutes</span><span style="font-size: 10px; color: red">preserve</span><span style="font-size: 13px; color: red">business</span><span style="font-size: 17px; color: red">date</span><p class="tag_cloud"></p><span style="font-size: 11px; color: red">space</span><span style="font-size: 10px; color: blue">oriented</span><span style="font-size: 10px; color: red">based</span><span style="font-size: 10px; color: blue">approach</span><span style="font-size: 14px; color: orange">press</span><span style="font-size: 11px; color: black">content</span><span style="font-size: 10px; color: red">butterfly</span><span style="font-size: 15px; color: orange">update</span><span style="font-size: 13px; color: green">oracle</span><span style="font-size: 10px; color: blue">emc</span><p class="tag_cloud"></p><span style="font-size: 16px; color: red">hewlett</span><span style="font-size: 10px; color: black">newswire</span><span style="font-size: 14px; color: green">billerica</span><span style="font-size: 24px; color: blue">data</span><span style="font-size: 17px; color: black">revision</span><span style="font-size: 11px; color: red">uploads</span><span style="font-size: 18px; color: blue">source</span><span style="font-size: 13px; color: orange">infobox</span><span style="font-size: 10px; color: orange">kanaracus</span><span style="font-size: 16px; color: red">software</span><p class="tag_cloud"></p><span style="font-size: 10px; color: blue">logo</span><span style="font-size: 12px; color: red">acopia</span><span style="font-size: 10px; color: green">slaughters</span><span 
style="font-size: 20px; color: green">boston</span><span style="font-size: 10px; color: red">hpinfo</span><span style="font-size: 31px; color: blue">news</span><span style="font-size: 11px; color: orange">timestamp</span><span style="font-size: 10px; color: green">times</span><span style="font-size: 10px; color: orange">example</span><span style="font-size: 14px; color: black">height</span><p class="tag_cloud"></p><span style="font-size: 11px; color: black">appliance</span><span style="font-size: 11px; color: black">class</span><span style="font-size: 17px; color: green">lynch</span><span style="font-size: 27px; color: red">text</span><span style="font-size: 11px; color: green">feb</span><span style="font-size: 16px; color: green">products</span><span style="font-size: 10px; color: black">record</span><span style="font-size: 17px; color: black">packard</span><span style="font-size: 15px; color: black">plans</span><span style="font-size: 16px; color: orange">specific</span><p class="tag_cloud"></p><span style="font-size: 14px; color: red">integration</span><span style="font-size: 24px; color: black">buy</span><span style="font-size: 10px; color: black">won</span><span style="font-size: 18px; color: orange">publisher</span><span style="font-size: 15px; color: blue">use</span><span style="font-size: 10px; color: blue">homepage</span><span style="font-size: 11px; color: black">growth</span><span style="font-size: 16px; color: orange">pdf</span><span style="font-size: 25px; color: black">sybase</span><span style="font-size: 10px; color: black">technical</span><p class="tag_cloud"></p><span style="font-size: 11px; color: black">greenplum</span><span style="font-size: 10px; color: black">expands</span><span style="font-size: 16px; color: red">article</span><span style="font-size: 12px; color: black">dailymarkets</span><span style="font-size: 14px; color: black">shatter</span><span style="font-size: 11px; color: red">official</span><span style="font-size: 11px; color: 
black">syntax</span><span style="font-size: 24px; color: green">web</span><span style="font-size: 59px; color: red">systems</span><span style="font-size: 14px; color: black">syncsort</span><p class="tag_cloud"></p><span style="font-size: 30px; color: blue">analytics</span><span style="font-size: 11px; color: black">management</span><span style="font-size: 14px; color: red">names</span><span style="font-size: 23px; color: blue">inc</span><span style="font-size: 10px; color: red">march</span><span style="font-size: 10px; color: black">daily</span><span style="font-size: 20px; color: black">dbms</span><span style="font-size: 14px; color: blue">february</span><span style="font-size: 13px; color: black">bladesystem</span><span style="font-size: 15px; color: green">appoints</span><p class="tag_cloud"></p><span style="font-size: 41px; color: black">analytic</span><span style="font-size: 33px; color: blue">acquisition</span><span style="font-size: 16px; color: green">acquire</span><span style="font-size: 14px; color: orange">etl</span><span style="font-size: 10px; color: black">completed</span><span style="font-size: 10px; color: orange">cloud</span><span style="font-size: 15px; color: orange">get</span><span style="font-size: 11px; color: blue">christopher</span><span style="font-size: 17px; color: orange">hadoop</span><span style="font-size: 10px; color: blue">changes</span><p class="tag_cloud"></p><span style="font-size: 10px; color: black">enterprise</span><span style="font-size: 11px; color: blue">investors</span><span style="font-size: 10px; color: black">new</span><span style="font-size: 11px; color: black">extend</span><span style="font-size: 22px; color: black">ceo</span><span style="font-size: 12px; color: black">journal</span><span style="font-size: 10px; color: orange">monash</span><span style="font-size: 25px; color: orange">company</span><span style="font-size: 12px; color: orange">contributor</span><span style="font-size: 11px; color: red">completes</span>
17 tagcloud_package/examples/text_corpus.txt
... ... @@ -0,0 +1,17 @@
  1 +[[David DeWitt]] and [[Michael Stonebraker]], experts in [[parallel database]]s and [[shared-nothing architecture]]s, have been critical of the breadth of problems that MapReduce can be used for.&lt;ref name=&quot;shark&quot;&gt;{{cite web| url=http://typicalprogrammer.com/?p=16| title=Database Experts Jump the MapReduce Shark}}&lt;/ref&gt; They called its interface too low-level and questioned whether it really represents the [[paradigm shift]] its proponents have claimed it is.&lt;ref name=&quot;ddandms1&quot;&gt;{{cite web| url=http://databasecolumn.vertica.com/database-innovation/mapreduce-a-major-step-backwards/| title=MapReduce: A major step backwards| author=[[David DeWitt]]| coauthors=[[Michael Stonebraker]]| publisher=databasecolumn.com| accessdate=2008-08-27}}&lt;/ref&gt; They challenged the MapReduce proponents' claims of novelty, citing [[Teradata]] as an example of [[prior art]] that has existed for over two decades. They also compared MapReduce programmers to [[CODASYL|Codasyl]] programmers, noting both are &quot;writing in a [[Low-level programming language|low-level language]] performing low-level record manipulation.&quot;&lt;ref name=&quot;ddandms1&quot;/&gt; MapReduce's use of input files and lack of [[Logical schema|schema]] support prevents the performance improvements enabled by common database system features such as [[B-tree]]s and [[Partition (database)|hash partitioning]], though projects such as [[Pig (programming language)|Pig (or PigLatin)]], [[Sawzall (programming language)|Sawzall]], [[Apache Hive]]&lt;ref name=&quot;ApacheHiveWiki&quot;&gt;{{cite web| url=https://cwiki.apache.org/confluence/display/Hive/Home| title=Apache Hive - Index of - Apache Software Foundation}}&lt;/ref&gt;, [[HBase]]&lt;ref name=&quot;HBase&quot;&gt;{{cite web| url=http://hbase.apache.org/| title=HBase - HBase Home - Apache Software Foundation}}&lt;/ref&gt; and [[BigTable]]&lt;ref name=&quot;HBase&quot;/&gt;&lt;ref name=&quot;BigTablePaper&quot;&gt;{{cite 
web| url=http://static.googleusercontent.com/external_content/untrusted_dlcp/labs.google.com/en/us/papers/bigtable-osdi06.pdf| title=Bigtable: A Distributed Storage System for Structured Data| format=PDF}}&lt;/ref&gt; are addressing some of these problems.{{Citation needed|date=December 2010}}
  2 +[[Image:Go-inkscape.png|thumb|303px|...and done using [[Inkscape]]]]Wow, I was an utter fool to ever have used paint...from now on I'll take the trouble to download gimp at school; I thought gimp was unnecessary for simple tasks of creating geometric images, but apparently I was wrong...I didn't realise there was a built-in protractor. Anyway thanks, Chuck, your explanation of the formula helped me confirm the coordinates, although paint, for vertica/y-axis parts of coordinates, &quot;up&quot; means a lesser value, and &quot;down&quot; means a higher one, so it was actually (51, 289) and (353,289)....as for &quot;ancient technology&quot;, hey this method is part of the [[Euclid]]ean era! ;-) Thanks all! I'm going to save this explanation in my notepad as a wonderful example of how trigonometry affects graphical design. :D Onwards! (Oh, does anyone feel up to the task of helping me antialiase the hundreds of images (or going to be used for Xiangqi) in [[Template:xiangqi-position]] and [[Template:Game of Go Position]]?) -- [[User:Natalinasmpf|Natalinasmpf]] 20:14, 19 July 2005 (UTC)
  3 +On November 14, 2008, DMExpress set a world record for ETL performance by extracting, transforming, cleansing, and loading 5.4 TB of data into a [[Vertica]] Analytic Database on a c-Class [[HP]] BladeSystem in 57 minutes &lt;ref&gt;[http://www.vertica.com/_pdf/ETL-World-Record-Audit-Report.pdf ETL Database Load Benchmark: Full Disclosure Report (November 14, 2008)]&lt;/ref&gt;&lt;ref&gt;[http://www.betanews.com/newswire/pr/Syncsort_and_Vertica_Shatter_Database_ETL_World_Record_Using_HP_BladeSystem_cClass/153209 BetaNews Newswire (December 2, 2008)]&lt;/ref&gt;. [[Microsoft]] and [[Unisys]] set the previous ETL world record at 2.36 TB/hr in early 2008 &lt;ref&gt;[http://blogs.msdn.com/sqlperf/archive/2008/02/27/etl-world-record.aspx SQL Server Performance: ETL World Record! (February 27, 2008) ]&lt;/ref&gt;.
  4 +|Source=http://www.vertica.com/wp-content/uploads/2011/03/Vertica-HP.png
  5 +HP Software is the [[Enterprise software]] division of information technology company [[Hewlett-Packard]] (HP). From September 2005 through 2010, HP purchased a total of 15 software companies.&lt;ref&gt;[http://www.eweek.com/c/a/IT-Infrastructure/HP-Is-Serious-About-Software-25-Reasons-Why-585952/ eWeek.com: “HP Is Serious About Software: 25 Reasons Why” Taft. Dec. 2010]&lt;/ref&gt; According to Software Magazine, HP is the 3rd largest software company in the world in total software revenue, behind [[IBM ]] and [[Microsoft]] as the first and second largest, respectively.&lt;ref&gt;Software Magazine, 29th Annual Software 500 Ranking. September 27, 2011 [http://online.qmags.com/SWM0911]&lt;/ref&gt; In May 2010, HP announced that Bill Veghte would serve as the executive vice president of HP Software.&lt;ref&gt;[http://news.cnet.com/8301-13860_3-20004164-56.html CNET News: “Former Microsoft exec Veghte headed to HP.” Ina Fried May 5, 2010.]&lt;/ref&gt; Veghte formerly led Microsoft's Windows business. HP continued to acquire software and technology businesses in 2010.&lt;ref&gt;[http://www.ft.com/cms/s/2/e7ace394-bec1-11df-a755-00144feab49a.htm Financial Times:“HP expands in security with ArcSight deal.” Joseph Menn. Sept. 13, 2010.]&lt;/ref&gt;&lt;ref&gt;[http://www.crn.com/news/security/227400207/hp-acquisition-train-keeps-rolling-with-arcsight.htm CRN: “HP Acquisition Train Keeps Rolling With ArcSight.” Hickey. Sept. 13, 2010.]&lt;/ref&gt; In August 2010, HP announced it would acquire [[Fortify Software]], a software security assurance company, specializing in static application security analysis.&lt;ref&gt;[http://www.seattletimes.nwsource.com/.../2012968476_apushewlettpackardfortifysoftware.html Seattle Times: &quot;HP completes deal for Fortify Software.&quot; Sept. 
22, 2010.]&lt;/ref&gt;&lt;ref&gt;name=Yahoo.com&gt;[http://finance.yahoo.com/news/HP-to-Acquire-Fortify-bw-3566564602.html?x=0&amp;.v=1 HP to Acquire Fortify Software, Helping Clients Proactively Reduce Business Risks of Insecure Software] {{en icon}} , ''[[Yahoo.com]]'', August 17, 2010&lt;/ref&gt; Also in August 2010, HP announced the acquisition of Denver-based [[Stratavia]], a privately held database and application automation company for [[cloud computing]].&lt;ref&gt;[http://www.mercurynews.com/bay-area-news/ci_15901967?source=rss Mercury News. HP buys Denver software maker Stratavia to simplify 'cloud computing.' August 2010.]&lt;/ref&gt; In mid-September 2010, HP announced it had signed a definitive agreement to acquire [[ArcSight]] (Nasdaq: ARST), a security and compliance software provider, for $43.50 per share, or approximately $1.5 billion.&lt;ref&gt;[http://www.thestreet.com/story/10858380/2/hp-to-acquire-arcsight.html Thestreet.com: “HP To Acquire ArcSight.” Sept. 13, 2010]&lt;/ref&gt; On October 22, 2010, HP announced it had completed its acquisition of ArcSight.&lt;ref&gt;[http://www.mercurynews.com/breaking-news/ci_16406958?nclick_check=1 San Jose Mercury News: “Hewlett-Packard completes $1.5B ArcSight acquisition.” Russell. October 2010]&lt;/ref&gt; The acquisitions of Fortify, Arcsight and TippingPoint are now being integrated into HP's IT security software portfolio.&lt;ref&gt;[http://www.v3.co.uk/v3/news/2274840/rsa-hp-risk-management RSA: “HP calls for new approach to risk analysis.” Nichols. Feb. 2011.]&lt;/ref&gt; In Feb. 2011, HP announced it would acquire, real-time analytics platform company [[Vertica]], a privately-held firm based in Billerica, Mass.&lt;ref&gt;[http://www.dailymarkets.com/stock/2011/02/15/hewlett-packard-to-get-vertica/ DailyMarkets.com: “Hewlett-Packard To Get Vertica.” Zacks Investment Research. 
February 15, 2011.]&lt;/ref&gt; On March 22, 2011, HP completed its acquisition of Vertica to expands HP’s information optimization, business intelligence and analytics portfolio for large enterprise companies and the public sector.&lt;ref&gt;[http://www.computerworld.com/s/article/9209327/Update_HP_to_buy_Vertica_for_analytics?source=rss_news ComputerWorld.com: “Update: HP to buy Vertica for analytics.” Kanaracus. Feb. 2011.]&lt;/ref&gt;
  6 +&lt;div class=&quot;boilerplate&quot; style=&quot;margin:0.5em auto;width:80%;background-color:#f7f8ff;border:2px solid #8888aa; padding:4px;font-size:85%;min-height:64px;vertica</comment>
  7 +The data sorting and transformation capabilities described by Varsegi for mainframe systems are provided as well in DMExpress, Syncsort's [[data integration]] product for UNIX, Windows, and Linux. DMExpress is typically used for [[Extract, transform, load|ETL]], [[data warehousing]], and [[business intelligence]] applications.&lt;ref&gt;[http://www.b-eye-network.com/listen/5846 Audio Interview with Syncsort's Rich Pilkington]&lt;/ref&gt; The program is designed to transform and consolidate data from multiple sources. On November 14, 2008, DMExpress set a world record for ETL performance by extracting, transforming, cleansing, and loading 5.4 TB of data into a [[Vertica]] Analytic Database on a c-Class [[HP]] BladeSystem in 57 minutes.&lt;ref&gt;[http://www.vertica.com/_pdf/ETL-World-Record-Audit-Report.pdf ETL Database Load Benchmark: Full Disclosure Report (November 14, 2008)]&lt;/ref&gt;&lt;ref&gt;[http://www.betanews.com/newswire/pr/Syncsort_and_Vertica_Shatter_Database_ETL_World_Record_Using_HP_BladeSystem_cClass/153209 BetaNews Newswire (December 2, 2008)]&lt;/ref&gt; [[Microsoft]] and [[Unisys]] set the previous ETL world record at 2.36 TB/hr in early 2008.&lt;ref&gt;[http://blogs.msdn.com/sqlperf/archive/2008/02/27/etl-world-record.aspx SQL Server Performance: ETL World Record! (February 27, 2008) ]&lt;/ref&gt; System administrators and analysts often use DMExpress to pre-process data to speed database loads, to create and maintain aggregate data stores&lt;ref&gt;[[Data store network]]&lt;/ref&gt; from flat files, to optimize reporting,&lt;ref&gt;[http://products.databasejournal.com/dbtools/mgmt/1099337641.html Database Journal Product Guide]&lt;/ref&gt; and for [[changed data capture]] (CDC) applications.&lt;ref&gt;[http://research.pcpro.co.uk/detail/RES/1208450325_439.html PC Pro Research Paper: Enterprise Data Integration Essentials]&lt;/ref&gt; Data warehouse expert, Dr. 
[[Ralph Kimball]] in the first edition of his popular book, The Data Warehouse Toolkit, explained how data management and sorting products like Syncsort’s can be valuable for Database Management Systems ([[DBMS]]s):
  8 +&lt;div class=&quot;boilerplate&quot; style=&quot;margin:0.5em auto; width:80%; clear:both; background-color:#f7f8ff; border:2px solid #8888aa; padding:4px; font-size:85%; min-height:64px; vertica</comment>
  9 + homepage = [http://www.vertica.com/ www.vertica.com]
  10 +'''Vertica Systems''' is an [[Analytics|analytic]] [[Database management system|database management]] software company.&lt;ref&gt;''Network World'' staff: &quot;New database company raises funds, nabs ex-Oracle bigwigs”, [http://www.linuxworld.com/news/2007/021407-vertica-oracle.html] ''LinuxWorld'', February 14, 2007&lt;/ref&gt;&lt;ref&gt; Brodkin, J: &quot;10 enterprise software companies to watch&quot;, [http://www.networkworld.com/news/2007/041107-enterprise-software-companies-to-watch.html?page=9] ''Network World'', April 11, 2007&lt;/ref&gt; Vertica was founded in 2005 by database researcher [[Michael Stonebraker]], and Andrew Palmer; its President and CEO is [[Christopher P. Lynch]]. HP announced it would acquire the company in February 2011.&lt;ref&gt;[http://www.hp.com/hpinfo/newsroom/press/2011/110214xb.html HP News Release: “HP to Acquire Vertica: Customers Can Analyze Massive Amounts of Big Data at Speed and Scale” Feb. 2011]&lt;/ref&gt; On March 22, 2011, HP completed its acquisition of Vertica.&lt;ref&gt;
  11 +The Vertica Analytic Database runs on [[Grid computing|grids]] of [[Linux]]-based [[Commodity computing|commodity servers]]. It is also available as a hosted DBMS provisioned by and running on the [[Amazon ec2|Amazon Elastic Compute Cloud]]. It has integration with [[Hadoop]].&lt;ref&gt;{{cite web |url=http://www.dbms2.com/2010/10/12/vertica-hadoop-connector-integration/ |title=Vertica-Hadoop integration |date=October 12, 2010 |work=DBMS2}}&lt;/ref&gt;
  12 +In January 2010, it was reported that Vertica won the claims construction hearing,&lt;ref&gt;Monash, C: &quot;Vertica slaughters Sybase in patent litigation”,[http://www.dbms2.com/2010/01/15/vertica-sybase-ipatent-litigation/]''DBMS2'', January 14, 2010&lt;/ref&gt; successfully defending itself from a January 2008 patent-infringement lawsuit filed by [[Sybase]].&lt;ref&gt;{{cite court |litigants = Sybase, Inc. v. Vertica Systems, Inc. |court = Texas Eastern District Court |date = January 30, 2008 |url= http://dockets.justia.com/docket/court-txedce/case_no-6:2008cv00024/case_id-107871/}}&lt;/ref&gt;
  13 +* [http://www.vertica.com Official website]
  14 + <comment>+vertica;</comment>
  15 +|url=http://www.bizjournals.com/boston/print-edition/2011/04/08/billericas-vertica-plans-growth-path.html
  16 +|url=http://www.xconomy.com/boston/2011/03/28/vertica-ceo-chris-lynch-talks-hp-acquisition-fires-back-at-netezza-ibm-in-%E2%80%9Cbig-data%E2%80%9D-battle/
  17 +|url=http://www.vertica.com/news/press/vertica-appoints-christopher-lynch-new-president-and-ceo/
364 tagcloud_package/src/TagCloud.cpp
... ... @@ -0,0 +1,364 @@
  1 +/*
  2 +Portions of this software Copyright (c) 2011 by Vertica, an HP
  3 +Company. All rights reserved.
  4 +
  5 +Redistribution and use in source and binary forms, with or without
  6 +modification, are permitted provided that the following conditions are
  7 +met:
  8 +
  9 +- Redistributions of source code must retain the above copyright
  10 + notice, this list of conditions and the following disclaimer.
  11 +
  12 +- Redistributions in binary form must reproduce the above copyright
  13 + notice, this list of conditions and the following disclaimer in the
  14 + documentation and/or other materials provided with the distribution.
  15 +
  16 +
  17 +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
  18 +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
  19 +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
  20 +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
  21 +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
  22 +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
  23 +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
  24 +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
  25 +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  26 +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
  27 +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  28 +*/
  29 +
  30 +#include <string>
  31 +#include <map>
  32 +#include <list>
  33 +#include <set>
  34 +#include <sstream>
  35 +#include <fstream>
  36 +#include <algorithm>
  37 +#include <queue>
  38 +#include <ctype.h>
  39 +#include <limits>
  40 +#include <stdlib.h>
  41 +
  42 +#include "Vertica.h"
  43 +
  44 +using namespace Vertica;
  45 +
  46 +const size_t MaxWordLen = 25;
  47 +const size_t WordRange = 10;
  48 +const size_t MaxStringLen = 64000;
  49 +
  50 +class WordFreqCalc
  51 +{
  52 +public:
  53 + WordFreqCalc(const std::string &target);
  54 + void updateWordFreq(const std::string &line);
  55 + const std::map<std::string, double> &getWordFreq() { return wordFreq; }
  56 +
  57 +private:
  58 + // function words should not be considered, such as prepositions: 'of', 'and', 'a', 'an', etc
  59 + std::set<std::string> funcWords;
  60 +
  61 + // weighted average of frequency a word appears close to the target word
  62 + std::map<std::string, double> wordFreq;
  63 +
  64 + const std::string &target;
  65 +};
  66 +
  67 +WordFreqCalc::WordFreqCalc(const std::string &target)
  68 +: target(target)
  69 +{
  70 + std::istringstream ss("quot lt gt ref amp apos http www com html htm org url name title index domain link comment diff prev otherlinks page file first last user jpg cite php oldid there also be is was are were able to not can could dare have has had may might must need ought shall should will would a all an another any both each either every her his its my neither no other our per some that the their these this those whatever whichever your accordingly after albeit although and as because before both but consequently either for hence however if neither nevertheless nor once or since so than that then thence therefore tho' though thus till unless until when whenever where whereas wherever whether while whilst yet all another any anybody anyone anything both each either everybody everyone everything few he her hers herself him himself his it its itself many me mine myself neither no_one nobody none nothing one other ours ourselves several she some somebody someone something such that theirs them themselves these they this those us we what whatever which whichever who whoever whom whomever whose you yours yourself yourselves all another any both certain each either enough few fewer less little loads lots many more most much neither no none part several some various aboard about above absent according across after against ahead along alongside amid amidst among amongst anti around as aside astraddle astride at bar barring before behind below beneath beside besides between beyond but by circa concerning considering despite due during except excepting excluding failing following for from given in including inside into less like minus near near next notwithstanding of off on onto opposite out outside over past pending per pertaining regarding respecting round save saving since than through throughout thru till toward towards under underneath unlike until unto upon versus via with within without");
  71 + while (ss) {
  72 + std::string buf;
  73 + ss >> buf;
  74 + funcWords.insert(buf);
  75 + }
  76 +}
  77 +
  78 +void WordFreqCalc::updateWordFreq(const std::string &line)
  79 +{
  80 + std::list<std::string> prevWords;
  81 + bool afterTarget = false; // whether we've seen target within WordRange
  82 + size_t posAfterTarget = 0;
  83 +
  84 + // skip if the string doesn't have the target word
  85 + if (line.find(target) == std::string::npos)
  86 + return;
  87 +
  88 + // transform into lower case, and ignore all non-letter characters
  89 + std::string newline = line;
  90 + for (size_t i = 0; i < newline.size(); ++i) {
  91 + if (::isalpha(newline[i]))
  92 + newline[i] = ::tolower(newline[i]);
  93 + else
  94 + newline[i] = ' ';
  95 + }
  96 +
  97 + std::istringstream ss(newline);
  98 + while (ss) {
  99 + std::string word;
  100 + ss >> word;
  101 +
  102 + // ignore too long or too short words
  103 + if (word.size() > MaxWordLen || word.size() <= 2)
  104 + continue;
  105 +
  106 + // skip function words
  107 + if (funcWords.count(word) > 0)
  108 + continue;
  109 +
  110 + // found the target word
  111 + if (word == target) {
  112 + afterTarget = true;
  113 + posAfterTarget = 0;
  114 +
  115 + // update the frequencies of each previous words
  116 + size_t distance = 1;
  117 + std::list<std::string>::const_reverse_iterator rit;
  118 + for (rit = prevWords.rbegin(); rit != prevWords.rend(); ++rit) {
  119 + wordFreq[*rit] += 1/(double)distance;
  120 + ++distance;
  121 + }
  122 +
  123 + prevWords.clear();
  124 + continue;
  125 + }
  126 +
  127 + // keep track this word, with limited memory
  128 + prevWords.push_back(word);
  129 + while (prevWords.size() > WordRange)
  130 + prevWords.pop_front();
  131 +
  132 + // for words closely after the target words, update their frequencies as well
  133 + if (afterTarget) {
  134 + ++posAfterTarget;
  135 + wordFreq[word] += 1/(double)posAfterTarget;
  136 + if (posAfterTarget >= WordRange)
  137 + afterTarget = false;
  138 + }
  139 + }
  140 +}
  141 +
  142 +class RelevantWords : public TransformFunction
  143 +{
  144 + virtual void processPartition(ServerInterface &srvInterface, PartitionReader &input_reader, PartitionWriter &output_writer)
  145 + {
  146 + const VString &arg0 = input_reader.getStringRef(0);
  147 + const std::string &target = arg0.str();
  148 +
  149 + WordFreqCalc wordFreqCalc(target);
  150 +
  151 + // compute the relevant words and their weights/frequencies
  152 + do {
  153 + const VString &line = input_reader.getStringRef(1);
  154 + if (line.isNull()) continue;
  155 + wordFreqCalc.updateWordFreq(line.str());
  156 + } while (input_reader.next());
  157 +
  158 + // generate output from the computed map
  159 + const std::map<std::string, double> &wordFreq = wordFreqCalc.getWordFreq();
  160 + std::map<std::string, double>::const_iterator it;
  161 + for (it = wordFreq.begin(); it != wordFreq.end(); ++it) {
  162 + output_writer.setFloat(0, it->second);
  163 + VString &word = output_writer.getStringRef(1);
  164 + word.copy(it->first);
  165 + output_writer.next();
  166 + }
  167 + }
  168 +};
  169 +
  170 +class RelevantWordsFactory : public TransformFunctionFactory
  171 +{
  172 + virtual TransformFunction *createTransformFunction(ServerInterface &srvInterface)
  173 + { return vt_createFuncObj(srvInterface.allocator, RelevantWords); }
  174 +
  175 + virtual void getReturnType(ServerInterface &srvInterface, const SizedColumnTypes &input_types, SizedColumnTypes &output_types)
  176 + {
  177 + output_types.addFloat("weight");
  178 + output_types.addVarchar(MaxWordLen, "word");
  179 + }
  180 +
  181 + virtual void getPrototype(ServerInterface &srvInterface, ColumnTypes &argTypes, ColumnTypes &returnType)
  182 + {
  183 + argTypes.addVarchar(); // the key word
  184 + argTypes.addVarchar(); // the column containing text corpus
  185 +
  186 + returnType.addFloat();
  187 + returnType.addVarchar();
  188 + }
  189 +
  190 +};
  191 +
  192 +RegisterFactory(RelevantWordsFactory);
  193 +
  194 +
  195 +class RelevantWordsNoLoad : public TransformFunction
  196 +{
  197 + virtual void processPartition(ServerInterface &srvInterface, PartitionReader &input_reader, PartitionWriter &output_writer)
  198 + {
  199 + const VString &arg0 = input_reader.getStringRef(0);
  200 + const std::string &target = arg0.str();
  201 +
  202 + const VString &arg1 = input_reader.getStringRef(1);
  203 + const std::string &filename = arg1.str();
  204 + std::ifstream infile(filename.c_str(), std::ios::in);
  205 + if (!infile.good())
  206 + vt_report_error(0, "Could not open file %s", filename.c_str());
  207 +
  208 + WordFreqCalc wordFreqCalc(target);
  209 +
  210 + const size_t BLK_SIZE_BYTE = 64*1024; // 64k
  211 + char buf[BLK_SIZE_BYTE];
  212 + while (infile.good()) {
  213 + infile.read(buf, BLK_SIZE_BYTE);
  214 + wordFreqCalc.updateWordFreq(buf);
  215 + }
  216 +
  217 + // generate output from the computed map
  218 + const std::map<std::string, double> &wordFreq = wordFreqCalc.getWordFreq();
  219 + std::map<std::string, double>::const_iterator it;
  220 + for (it = wordFreq.begin(); it != wordFreq.end(); ++it) {
  221 + output_writer.setFloat(0, it->second);
  222 + VString &word = output_writer.getStringRef(1);
  223 + word.copy(it->first);
  224 + output_writer.next();
  225 + }
  226 + }
  227 +};
  228 +
  229 +class RelevantWordsNoLoadFactory : public TransformFunctionFactory
  230 +{
  231 + virtual TransformFunction *createTransformFunction(ServerInterface &srvInterface)
  232 + { return vt_createFuncObj(srvInterface.allocator, RelevantWordsNoLoad); }
  233 +
  234 + virtual void getReturnType(ServerInterface &srvInterface, const SizedColumnTypes &input_types, SizedColumnTypes &output_types)
  235 + {
  236 + output_types.addFloat("weight");
  237 + output_types.addVarchar(MaxWordLen, "word");
  238 + }
  239 +
  240 + virtual void getPrototype(ServerInterface &srvInterface, ColumnTypes &argTypes, ColumnTypes &returnType)
  241 + {
  242 + argTypes.addVarchar(); // the word
  243 + argTypes.addVarchar(); // file name of the text corpus
  244 +
  245 + returnType.addFloat();
  246 + returnType.addVarchar();
  247 + }
  248 +
  249 +};
  250 +
  251 +RegisterFactory(RelevantWordsNoLoadFactory);
  252 +
  253 +
  254 +struct RenderWord
  255 +{
  256 + RenderWord(const std::string &word, int fontsize, const std::string &color)
  257 + : word(word), fontsize(fontsize), color(color)
  258 + { }
  259 +
  260 + std::string word;
  261 + int fontsize;
  262 + std::string color;
  263 +};
  264 +
  265 +int getFontSize(double w_max, double w_min, double w)
  266 +{
  267 + const int font_max = 50;
  268 + const int font_min = 10;
  269 + return font_max * (w - w_min) / (w_max - w_min) + font_min;
  270 +}
  271 +
  272 +bool compare_random(const RenderWord &a, const RenderWord &b)
  273 +{
  274 + return rand() % 2 == 0;
  275 +}
  276 +
  277 +class GenerateTagCloud : public TransformFunction
  278 +{
  279 + virtual void processPartition(ServerInterface &srvInterface, PartitionReader &input_reader, PartitionWriter &output_writer)
  280 + {
  281 + const VString &arg2 = input_reader.getStringRef(2);
  282 + const std::string &filename = arg2.str();
  283 + std::ofstream outfile(filename.c_str(), std::ios::out | std::ios::trunc);
  284 + if (!outfile.good())
  285 + vt_report_error(0, "Could not open file %s for output", filename.c_str());
  286 +
  287 + std::map<std::string, double> wordFreq;
  288 +
  289 + // populate the word requency map, and compute necessary parameters to get font size later
  290 + double w_min = std::numeric_limits<double>::max();
  291 + double w_max = std::numeric_limits<double>::min();
  292 + do {
  293 + double weight = input_reader.getFloatRef(0);
  294 + const VString &word = input_reader.getStringRef(1);
  295 + if (word.isNull()) continue;
  296 + wordFreq[word.str()] = weight;
  297 + w_min = std::min(w_min, weight);
  298 + w_max = std::max(w_max, weight);
  299 + } while (input_reader.next());
  300 +
  301 + // some predefined color used in outputed HTML
  302 + std::vector<std::string> colors;
  303 + colors.push_back("red");
  304 + colors.push_back("blue");
  305 + colors.push_back("orange");
  306 + colors.push_back("green");
  307 + colors.push_back("black");
  308 +
  309 + // randomly generate color, and assign the font size according to their weight
  310 + std::list<RenderWord> renderList;
  311 + std::map<std::string, double>::const_iterator it;
  312 + for (it = wordFreq.begin(); it != wordFreq.end(); ++it) {
  313 + int fz = getFontSize(w_max, w_min, it->second);
  314 + const std::string &color = colors[rand() % colors.size()];
  315 + renderList.push_back(RenderWord(it->first, fz, color));
  316 + }
  317 + // sort by random to shuffle positions of the words
  318 + renderList.sort(compare_random);
  319 +
  320 + // generate output
  321 + const size_t NumWordsPerLine = 10;
  322 + size_t nword = 0;
  323 + std::list<RenderWord>::const_iterator iter;
  324 + std::ostringstream oss;
  325 + for (iter = renderList.begin(); iter != renderList.end(); ++iter) {
  326 + // get a new line
  327 + if (nword % NumWordsPerLine == 0) oss << "<p class=\"tag_cloud\"></p>";
  328 +
  329 + oss << "<span style=\"font-size: " << iter->fontsize << "px; color: "
  330 + << iter->color << "\">"
  331 + << iter->word << "</span>";
  332 + ++nword;
  333 + }
  334 +
  335 + // write the output to file
  336 + outfile << oss.str();
  337 + VString &word = output_writer.getStringRef(0);
  338 + word.copy("HTML file generated!");
  339 + output_writer.next();
  340 + }
  341 +};
  342 +
  343 +class GenerateTagCloudFactory : public TransformFunctionFactory
  344 +{
  345 + virtual TransformFunction *createTransformFunction(ServerInterface &srvInterface)
  346 + { return vt_createFuncObj(srvInterface.allocator, GenerateTagCloud); }
  347 +
  348 + virtual void getReturnType(ServerInterface &srvInterface, const SizedColumnTypes &input_types, SizedColumnTypes &output_types)
  349 + {
  350 + output_types.addVarchar(MaxStringLen, "HTML generate status");
  351 + }
  352 +
  353 + virtual void getPrototype(ServerInterface &srvInterface, ColumnTypes &argTypes, ColumnTypes &returnType)
  354 + {
  355 + argTypes.addFloat(); // weight of the word
  356 + argTypes.addVarchar(); // the word
  357 + argTypes.addVarchar(); // filename of the generated .html file
  358 +
  359 + returnType.addVarchar(); // return the status
  360 + }
  361 +
  362 +};
  363 +
  364 +RegisterFactory(GenerateTagCloudFactory);
6 tagcloud_package/src/third-party/makefile
... ... @@ -0,0 +1,6 @@
  1 +##########################
  2 +# This makefile contains commands to build whatever third-party libraries your
  3 +# functions require. See web_package/src/third-party/makefile for an example
  4 +##########################
  5 +
  6 +all:

0 comments on commit 9590f04

Please sign in to comment.
Something went wrong with that request. Please try again.